{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# COMPSCI 389: Introduction to Machine Learning\n", "# Data Processing Example\n", "\n", "The code below runs gradient descent to minimize the sample mean squared error when using a linear parametric model, with the second-degree (order) polynomial basis.\n", "\n", "#### Unlike before (notebook 15), this code applies the standardization preprocessing step to rescale the features.\n", "\n", "The first code block defines the various functions for this. I recommend skipping down to the next markdown block." ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", "import pandas as pd\n", "import matplotlib.pyplot as plt\n", "from sklearn.base import BaseEstimator\n", "from sklearn.model_selection import train_test_split\n", "\n", "####################################################################\n", "### NOTE: Below we added StandardScaler\n", "####################################################################\n", "from sklearn.preprocessing import PolynomialFeatures, StandardScaler\n", "\n", "# Function to calculate mean squared error (for evaluation)\n", "def mean_squared_error(predictions, labels):\n", " return np.mean((predictions - labels) ** 2)\n", "\n", "# Function to calculate gradients\n", "def compute_gradients(X, y, weights):\n", " predictions = X.dot(weights)\n", " errors = predictions - y\n", " return 2 / X.shape[0] * X.T.dot(errors)\n", "\n", "class PolynomialRegressionGD(BaseEstimator):\n", " def __init__(self, learning_rate, iterations=1000, polynomial_degree=2):\n", " self.learning_rate = learning_rate\n", " self.iterations = iterations\n", " self.polynomial_degree = polynomial_degree\n", "\n", " def fit(self, X, y):\n", " ####################################################################\n", " ### NOTE: The lines below are new - they apply standardization\n", " ####################################################################\n", " # Standardize features and store the scaler\n", " self.scaler_ = StandardScaler().fit(X)\n", " X_scaled = self.scaler_.transform(X)\n", " \n", " # Expand features into polynomial basis and store the transformer\n", " self.poly = PolynomialFeatures(degree=self.polynomial_degree)\n", " X_poly = self.poly.fit_transform(X_scaled) # Use standardized features\n", "\n", " # Get the number of features\n", " numFeatures = X_poly.shape[1]\n", "\n", " # Initialize weights and loss history\n", " self.weights = np.zeros(numFeatures)\n", " self.loss_history = []\n", "\n", " # Print the initial loss\n", " predictions = X_poly.dot(self.weights)\n", " loss = mean_squared_error(predictions, y)\n", " print(f\"Iteration 0/{self.iterations}, Loss: {loss:.4f}\")\n", "\n", " for i in range(1, self.iterations + 1):\n", " # Compute the gradient of the loss function\n", " gradients = compute_gradients(X_poly, y, self.weights)\n", "\n", " # Update the weights using gradient descent\n", " self.weights -= self.learning_rate * gradients\n", "\n", " # Compute, print, and store the resulting loss\n", " loss = mean_squared_error(X_poly.dot(self.weights), y)\n", " self.loss_history.append(loss)\n", " print(f\"Iteration {i}/{self.iterations}, Loss: {loss:.4f}\")\n", "\n", " return self\n", "\n", " def predict(self, X):\n", " ####################################################################\n", " ### NOTE: The line below is new - it applies standardization.\n", " ### NOTE: We don't call \"fit\" again! 
" ### transformation used during training.\n", " ####################################################################\n", " # Standardize the input features using the stored scaler\n", " X_scaled = self.scaler_.transform(X)\n", " # Transform standardized features into the polynomial basis\n", " X_poly = self.poly_.transform(X_scaled)\n", " return X_poly.dot(self.weights)\n", "\n", "# Load the data set\n", "df = pd.read_csv(\"data/GPA.csv\", delimiter=',')\n", "\n", "# Split the data into features and labels\n", "X = df.iloc[:, :-1]\n", "y = df.iloc[:, -1]\n", "\n", "# Split the data into training and testing sets\n", "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.4, shuffle=True)\n", "\n", "def run(alpha):\n", " iterations = 1000\n", " polynomial_degree = 2\n", "\n", " # Initialize and fit the model\n", " model = PolynomialRegressionGD(\n", " learning_rate=alpha,\n", " iterations=iterations,\n", " polynomial_degree=polynomial_degree\n", " )\n", " model.fit(X_train, y_train)\n", "\n", " # Plotting the loss over iterations\n", " plt.plot(range(1, iterations + 1), model.loss_history)\n", " plt.xlabel('Iterations')\n", " plt.ylabel('Mean Squared Error')\n", " plt.yscale('log')\n", " plt.title(f'Gradient Descent Loss, Polynomial Degree: {polynomial_degree}')\n", " plt.show()\n", "\n", " # Predict on the test set\n", " predictions = model.predict(X_test)\n", "\n", " # Calculate MSE on the test set\n", " mse_test = mean_squared_error(predictions, y_test)\n", " print(f\"Test MSE: {mse_test:.4f}\")\n", "\n", " # Calculate the standard error of the MSE\n", " squared_errors = (predictions - y_test) ** 2\n", " std_error = np.std(squared_errors) / np.sqrt(len(squared_errors))\n", " print(f\"Standard Error of MSE: {std_error:.4f}\")\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "The `run` function takes the step size (learning rate) `alpha` as its only argument. It then runs 1,000 iterations of gradient descent on the GPA data set using the second-degree polynomial basis. Let's recreate the plot from the last lecture!\n", "\n", "Try to find a value for `alpha` that is effective, starting with 0.1. Remember, running the code may result in errors when the loss becomes `inf` or `nan`."
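, "\n", "\n", "As a reminder of why standardization matters here: `StandardScaler` learns each feature's mean and standard deviation from the training data in `fit`, and `transform` then reuses those same statistics everywhere, including on the test set. Below is a minimal sketch of this fit/transform contract, using the `X_train` and `X_test` variables defined above (the scaled variables are just illustrative names, not part of the notebook's pipeline):\n", "\n", "```python\n", "from sklearn.preprocessing import StandardScaler\n", "\n", "scaler = StandardScaler().fit(X_train)      # learn per-feature mean/std from training data only\n", "X_train_scaled = scaler.transform(X_train)  # (x - mean) / std for each feature\n", "X_test_scaled = scaler.transform(X_test)    # reuse the training statistics at test time\n", "```\n", "\n", "Calling `fit` (or `fit_transform`) again on the test data would rescale it with different statistics than the model was trained with, silently changing the meaning of the learned weights."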
] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Iteration 0/1000, Loss: 8.4222\n", "Iteration 1/1000, Loss: 145.9849\n", "Iteration 2/1000, Loss: 5237.2773\n", "Iteration 3/1000, Loss: 190565.1518\n", "Iteration 4/1000, Loss: 6936094.5941\n", "Iteration 5/1000, Loss: 252458390.4281\n", "Iteration 6/1000, Loss: 9188924724.3263\n", "Iteration 7/1000, Loss: 334456453841.9052\n", "Iteration 8/1000, Loss: 12173472183818.7656\n", "Iteration 9/1000, Loss: 443087353550525.5000\n", "Iteration 10/1000, Loss: 16127395693842116.0000\n", "Iteration 11/1000, Loss: 587001388736542592.0000\n", "Iteration 12/1000, Loss: 21365546981041463296.0000\n", "Iteration 13/1000, Loss: 777658463094317907968.0000\n", "Iteration 14/1000, Loss: 28305041090632451358720.0000\n", "Iteration 15/1000, Loss: 1030240637971712335413248.0000\n", "Iteration 16/1000, Loss: 37498471340486039288414208.0000\n", "Iteration 17/1000, Loss: 1364861082981141061241405440.0000\n", "Iteration 18/1000, Loss: 49677912438664971040315146240.0000\n", "Iteration 19/1000, Loss: 1808165691759096687700452835328.0000\n", "Iteration 20/1000, Loss: 65813215740321349824237300350976.0000\n", "Iteration 21/1000, Loss: 2395454899859450601758578572263424.0000\n", "Iteration 22/1000, Loss: 87189238707037747946882864510926848.0000\n", "Iteration 23/1000, Loss: 3173494665568048122825300965951799296.0000\n", "Iteration 24/1000, Loss: 115508158366061513026780840101096194048.0000\n", "Iteration 25/1000, Loss: 4204240452608712954086361696437541011456.0000\n", "Iteration 26/1000, Loss: 153025016010859968268156197975466040098816.0000\n", "Iteration 27/1000, Loss: 5569770756235891241019678809687657130491904.0000\n", "Iteration 28/1000, Loss: 202727286594885390642401604618254378535485440.0000\n", "Iteration 29/1000, Loss: 7378823030393350523865957739334609263025717248.0000\n", "Iteration 30/1000, Loss: 268572771965650972123455653535716790977647607808.0000\n", "Iteration 31/1000, Loss: 9775452473138979775141736859531470974883962290176.0000\n", "Iteration 32/1000, Loss: 355804761425408212920985321307108439771106861121536.0000\n", "Iteration 33/1000, Loss: 12950503171168331664408912455428402129222068103806976.0000\n", "Iteration 34/1000, Loss: 471369555917540311897063365729163750221767135262146560.0000\n", "Iteration 35/1000, Loss: 17156805052992715727020885280167457221902753493906620416.0000\n", "Iteration 36/1000, Loss: 624469603373982221215595676390315783807944692186616954880.0000\n", "Iteration 37/1000, Loss: 22729306787223548362675719381079479098912880919822831976448.0000\n", "Iteration 38/1000, Loss: 827296291503130871774001370629021904774582518701342947540992.0000\n", "Iteration 39/1000, Loss: 30111747812720558491298030283802229504049052388745012110688256.0000\n", "Iteration 40/1000, Loss: 1096000750455980876966606602277370213985198300952590956963561472.0000\n", "Iteration 41/1000, Loss: 39891993399752929904434334800475739851767199474924416989499752448.0000\n", "Iteration 42/1000, Loss: 1451979970582917966029636310977952020969988415860813672162252554240.0000\n", "Iteration 43/1000, Loss: 52848846480232025530961530840820504509192298445319872089955944628224.0000\n", "Iteration 44/1000, Loss: 1923580649097964541425339536570360799299631197398911320415021637304320.0000\n", "Iteration 45/1000, Loss: 70014063882514121466916110725761753227747144944332287378188567893245952.0000\n", "Iteration 46/1000, Loss: 2548356443304561014372692989854971428881290234609774079771789343812747264.0000\n", 
"Iteration 47/1000, Loss: 92754515336079020012691613151837251386627314240132247976470521806279671808.0000\n", "Iteration 48/1000, Loss: 3376058375913272376616733201410701102864665735379369395649891154199438163968.0000\n", "Iteration 49/1000, Loss: 122881027584225167591614690156471730930226685922478391076642760863702277160960.0000\n", "Iteration 50/1000, Loss: 4472596519030985698516236922976248058107228715337005289980805925623979220926464.0000\n", "Iteration 51/1000, Loss: 162792580883463530167969743268156856107390681765341139346785069512039392809582592.0000\n", "Iteration 52/1000, Loss: 5925288426518006088440994052250804340179163505811826321356386762464307829421375488.0000\n", "Iteration 53/1000, Loss: 215667340285988460922703937188027401273853277853952250391481314950334150763798855680.0000\n", "Iteration 54/1000, Loss: 7849812248441942647604553139862442544999613283006925464819698177572658304471703486464.0000\n", "Iteration 55/1000, Loss: 285715733564839980680906192325981761148140454412391285869899233210883248672273539792896.0000\n", "Iteration 56/1000, Loss: 10399418205537022714994124598847211906648175091562963142964623529457252148713163249942528.0000\n", "Iteration 57/1000, Loss: 378515728428066665859356788282425901456954893837827154998094216180392056735405182705205248.0000\n", "Iteration 58/1000, Loss: 13777131935240918614224271190084115515990274898694119416672664369375023678344914823790198784.0000\n", "Iteration 59/1000, Loss: 501457007214184181796147820949728733918927244815862299619882076638106859575557905579453186048.0000\n", "Iteration 60/1000, Loss: 18251921464219406509109140862193010302340565372368227486786499767522911722976444050686724800512.0000\n", "Iteration 61/1000, Loss: 664329408789663497466756381818509922665362759926706275879429203226488515918187931325968813129728.0000\n", "Iteration 62/1000, Loss: 24180115186666940567997795783301633034093381433447548624755542454139785335823685118029431708844032.0000\n", "Iteration 63/1000, Loss: 880102495395621878146722557399791103772016981913003062244194700355937425744133654636106519622451200.0000\n", "Iteration 64/1000, Loss: 32033776366322239178617389122965671505736847047265938401986379853615956605317087900932188340026867712.0000\n", "Iteration 65/1000, Loss: 1165958321509208290314270182688863442043299949459742837684241647189735323433564784223171568239093219328.0000\n", "Iteration 66/1000, Loss: 42438293629526525587533259598995241387231493616550437671290866788688081599678246305575714746123096686592.0000\n", "Iteration 67/1000, Loss: 1544659644312755523874023891704624510131423477129588698132891764613272484357492738209934873975221139800064.0000\n", "Iteration 68/1000, Loss: 56222180787880779105617807440601014919822634442567959452793905174730027435790526016731009455471721858465792.0000\n", "Iteration 69/1000, Loss: 2046362526646768844633741908906961060926483313355938766928063363603125186871713433441676344417595779790667776.0000\n", "Iteration 70/1000, Loss: 74483051560440818397588870619862988530685692861714000860237158570936206440674556630155177796455376184653381632.0000\n", "Iteration 71/1000, Loss: 2711017670386073873330546698848546933610253074100188093841106378905309747334089182424284309576130008519588970496.0000\n", "Iteration 72/1000, Loss: 98675022775906793129521822863215645845697628772313160927568383194761794230063647237389256231790596337503511248896.0000\n", "Iteration 73/1000, Loss: 3591551698900994641913468447143042090042168323819808608913573479721522103287164398970490196032291471575757145243648.0000\n", "Iteration 74/1000, 
Loss: 130724505989454893642531468620877499812911797010786683268334259227545276665226800755396652892294898176534959701884928.0000\n", "Iteration 75/1000, Loss: 4758081714768627407003863272699691718521204512047902894208293073243428556845489781730790943548913931703800942909456384.0000\n", "Iteration 76/1000, Loss: 173183608024051784596585767137460629142169262551415737168798002235857659351522126295009910779427780764109545215874826240.0000\n", "Iteration 77/1000, Loss: 6303498738816190698225686502564206860116857878364966518228618854620448979068440622803851425042839074756977135057425661952.0000\n", "Iteration 78/1000, Loss: 229433355752347037573101889019400640546853078974289077997901474460254177194725380799648707708289124093864435624815955542016.0000\n", "Iteration 79/1000, Loss: 8350864640875437294238419354445015715764234238204193096724771199492151610596951771235197300893089799616658055062016145489920.0000\n", "Iteration 80/1000, Loss: 303952927949580662277557373629239147300493645605776383956056437401120031857512461297525334696103050416201225188344019963346944.0000\n", "Iteration 81/1000, Loss: 11063211581338455155813505229044982119919750296675559896043114405756311900316666069965409483664966853183231124802244444646539264.0000\n", "Iteration 82/1000, Loss: 402676333204344287566987462863278946540989389716279191634404324629704968653661457134482051865465158487710045951346640409539379200.0000\n", "Iteration 83/1000, Loss: 14656524295025635762975244158982624386810696646059402772553853103673351760715714255066987999852490690677722315759794509695009423360.0000\n", "Iteration 84/1000, Loss: 533464936221285754878323518968997417158372898555541580031300156905070386190523137346360461050358159998189529986882320359348708048896.0000\n", "Iteration 85/1000, Loss: 19416938999252873692442587979451921813776673972302150080017791174750782157771092401903649711816817653714546386431302183638038275424256.0000\n", "Iteration 86/1000, Loss: 706733459880701745304375349625294608009341018367875800945041729566911713487452957748426886992054758193238525508520110259966042922024960.0000\n", "Iteration 87/1000, Loss: 25723528478621989195671121494626784606912484863994649959993551438327333084496587918757212662187645695819266665446619012803213693309419520.0000\n", "Iteration 88/1000, Loss: 936279311725488727054666214354632375268209662505592449143237854978981609573314645285380327403429447742717239885235230592532218063634825216.0000\n", "Iteration 89/1000, Loss: 34078487727439314356601940590807032375113498985264527218950985521849986166428465642741783215653167228098359138912173152638822034251347656704.0000\n", "Iteration 90/1000, Loss: 1240381274311153446483755500906695158564980530995782641937983693558022689138764013728760994638402798973547264609620377390117421912011141283840.0000\n", "Iteration 91/1000, Loss: 45147123838566211357411621168720107363084753712571261020099979238108809511427595033134260429494933408774345477387282460436539440282224830709760.0000\n", "Iteration 92/1000, Loss: 1643255048353405247748336966085789264353691329259979850003043568080105085068978784184003580448874934378020960727391753556887396505935967147786240.0000\n", "Iteration 93/1000, Loss: 59810834541629755842376096240203178962708077420038797447180137510882545838546697166149815763673471250379404638747201400462041004011243070860296192.0000\n", "Iteration 94/1000, Loss: 2176981553868231042852587946560523321344666922007969933208253523662403787051046944370694757388334956531119608862126755186889231329413266248258551808.0000\n", "Iteration 95/1000, Loss: 
79237294082293899743212607775929381279812663913142773954657944940791690515312486419204291075961156982883728627368994784841138619935028687905000259584.0000\n", "Iteration 96/1000, Loss: 2884061540313793999621556429200803925678906995182386800920246357243783054396623070576480950724546157964631381280835656200135342499029339429752128143360.0000\n", "Iteration 97/1000, Loss: 104973435358336456490032046125224364880014479952181640365016741254185672412642260614291650676271457601324373478031793794715973662034561739806079824429056.0000\n", "Iteration 98/1000, Loss: 3820799929855830152619849201083706013759063892371604359299643637732554689581806565416472007588995079276970181223871930409144262619806391774317151111872512.0000\n", "Iteration 99/1000, Loss: 139068632498812265277010122850075049804654245775511293960650628657891082735945798093213400964850171018651363605615716633142660731741022264499712589083705344.0000\n", "Iteration 100/1000, Loss: 5061789389694490287683032140416114608554458645396608546669858899008422227535779158802777889764136455584228877081833825058182721743341846087561650078805393408.0000\n", "Iteration 101/1000, Loss: 184237892939966587886357517220819478945096126858619871574751862704376697494385695017854253754625578074532080904583839790952283628221123911830783277173303148544.0000\n", "Iteration 102/1000, Loss: 6705850161222784036790353693501103644336566740597916028836633103711154344403747570490908456607764157735021620101367590088911971126450970709568959388515953541120.0000\n", "Iteration 103/1000, Loss: 244078054015872294589610701872561912374255410652407023756594296470250921694067292832707102800605214999012551051983127567855583275138259282420709236686892786778112.0000\n", "Iteration 104/1000, Loss: 8883899135812478880000503627863987984681989058761996212000887642870049721752874205201497437338088697661952813152356271390839549345763110919389245515443037181837312.0000\n", "Iteration 105/1000, Loss: 323354199841979023098811392430038233755548096084924426875276224595555032760496691019304034425804366174424484383700937707558210069146715596050793726376872017263591424.0000\n", "Iteration 106/1000, Loss: 11769374793321999306219846731632651067030222943675301254471109534999112183146899787982497931402636011503302221537359434296136841849433513973661301293836399353897418752.0000\n", "Iteration 107/1000, Loss: 428379105925873727598231038752315960933386572095463143520366608720325254909749011282643824962051638375966565296429381150444905313653808015926846215755102415970646360064.0000\n", "Iteration 108/1000, Loss: 15592048143285794704730661160413914665119589556600655109166263543395209198521070587289433397910437621153953317792539798976465270049672938422798844574127001243149538426880.0000\n", "Iteration 109/1000, Loss: 567515926756264078294812670094117604080207916449214734958407919751893284409248307424851758275484413246508685866243359068125617848829403665352080994313006176055344807018496.0000\n", "Iteration 110/1000, Loss: 20656319436822160486137217277745885719570971845686486845305644811250758288284693218177745345210531562657986384951276038648709264372124184089411442887764661928880669869473792.0000\n", "Iteration 111/1000, Loss: 751844155484447707644977069554465161338494433770759596980577246053531477799159403398315795167908786255344725624492441942452737747896128799484638042550525390287486401808171008.0000\n", "Iteration 112/1000, Loss: 
27365457620125059817893674012136265825527015878137623229178374423150971349842442993965688108289342637996988622127375004041099347146957945143014126089983073330104327248285794304.0000\n", "Iteration 113/1000, Loss: 996041886202241423924087407529751989646091116902794616559225480343999747580633518257436971711474640411830028820475330943639259010638803968448548904598685719642471043984988831744.0000\n", "Iteration 114/1000, Loss: 36253712722118376989593725055516445811429260964517965511672334533781394757227513478675145428505086893648621699359831434781373497120226299303251446958716941756661518289724596289536.0000\n", "Iteration 115/1000, Loss: 1319554633539797972513724602162660340163633571448139076867441563549177751028069174565812442865779094580050462606452375129247491081364525136090063876347064328519696964767635449315328.0000\n", "Iteration 116/1000, Loss: 48028858292188906559106881647340842482355817131940255440824883662065737048665584345809825166247453263393024590256567922260999011245806920085389230827045894943525683263605186983100416.0000\n", "Iteration 117/1000, Loss: 1748143782924006687732172661831943531568329477273299736751944785547715436325366799282230533422491583609105816499091586653934687040057986584928324627408828770671429486978390012435890176.0000\n", "Iteration 118/1000, Loss: 63628551550909241983851476107649335779096840616245470489136249037034733196321140433859862221488734636093399390735770837251555988556634309273226310028717848300188505957774427249545576448.0000\n", "Iteration 119/1000, Loss: 2315937974904385902701410315109347737091993792942800010852770525323932070809903903740877089363515863945069015265550821514318712553073665536902708853456418395394359060849179582216674476032.0000\n", "Iteration 120/1000, Loss: 84294999223938486881375856649204739523046121087123492213275447513455362476020795635242982857574543047831776084532184911338037107138002674108771200578056007669708197312546988220640902774784.0000\n", "Iteration 121/1000, Loss: 3068150775694738467630515839833874540905344556549470126443674389453545760563901000382456317883220319072461366998232804864214242467883301965867037540059807509048601559032275842859956332134400.0000\n", "Iteration 122/1000, Loss: 111673874714538474651376555897094732284687747523822209596535637955602756240538044081722117178848384974872246730601172021506156703374859735057707383010480516738877845575606535044383782006161408.0000\n", "Iteration 123/1000, Loss: 4064681042584857879187138397292446112004361350733912208929051362697173883347231237641976446689221819000429141425575124748171128263248540961724365802856113164501378978253955996008814957912129536.0000\n", "Iteration 124/1000, Loss: 147945363409136125699273567710002214903444286421479307578670425177478471797725065591534222846067235882206080985425210887077986471044395713043485694629745115730258294385024952235345634950327042048.0000\n", "Iteration 125/1000, Loss: 5384882682047347286429424444199932705487108561761963722139706889925237531739284944093162112860758302779741437090465484905198351270969958694557309141396252178259863572796298191052696495077132861440.0000\n", "Iteration 126/1000, Loss: 195997771280088428260951650563349438815123244729618597370157385267922295315578469972386249016534289629802671018818352848452990042429041803760507084429427123380639323815560222100506719624859865841664.0000\n", "Iteration 127/1000, Loss: 
7133883617341188644861708454095212095981006386123721151262411309110946896619689479206441150634861239828337107469190269778480673061699663517448704237906804231728518192049734789952718778895772241887232.0000\n", "Iteration 128/1000, Loss: 259657521273759526360433241195415103756476744897752746480326193202133459002261374386610125987440666052210835753464176523317227969725305383640481599230274411462964507496404127548050110997357685458862080.0000\n", "Iteration 129/1000, Loss: 9450957146278931221557217833628588964640984888688539435716478407597815779979796363074408167868423805568905953794867258610257329906903918066617483532224549705296913145464101581300449786523927796735016960.0000\n", "Iteration 130/1000, Loss: 343993852142758331345382844594917251939407119913897048889444006376690222215630148790936534324612351534017194962791623319344528274883920358779659727280849944822784951647681564788727158792010173941698002944.0000\n", "Iteration 131/1000, Loss: 12520612302067621276476573428843385207718300393590804195145371504802870766072780547536347080450663505748705654719087114022735702413013859266715956412433474620381361914318896585165586874879028792060045950976.0000\n", "Iteration 132/1000, Loss: 455722482951901372631456535481038878504783814623173231540136976414150669759114591452282844652516163337810192919586611440699374691739689890978352141699523923859263442913014288615921965049623850640116002324480.0000\n", "Iteration 133/1000, Loss: 16587286344897834165344880071734942886560829590832683190080394584694381327075680734719900568366245775603253364946956648993823028553367636102563830657065709699267330236017877607130633011256075677519843543744512.0000\n", "Iteration 134/1000, Loss: 603740387144061819368816882442901644364493270592150259878367722370874445710017559237641204869157429605571325841141013580078472233093041093906575015036147891562593340450681441159594894333796722802773739895259136.0000\n", "Iteration 135/1000, Loss: 21974809350354091880219971802560803790262822033614707820916379746015838121607318097644407250642966498203388737666156988891633826138784753470226144236693122675243339677186632501268693550503277816661319511519002624.0000\n", "Iteration 136/1000, Loss: 799834260332800987735387922060097439321019207869387910784137584500039632324414249280587938815521514886069779199225340233528615776110356861886242076984944695297109219462894503943326403537099603648994354980964007936.0000\n", "Iteration 137/1000, Loss: 29112190863754208244908752008785872234566986539466466191532428491331091020781626695062158611344407239077262572935159056717326792089195477537165720768018027533739738404505893230258932527641453006882911291341159792640.0000\n", "Iteration 138/1000, Loss: 1059619097255238557774913226152864009699530700654442484730530995586953565568625376934608891102752792492170037894832465576333538913990554631580671341347604201182859229572014429442138128042832631270774964607507159318528.0000\n", "Iteration 139/1000, Loss: 38567782016911904886447916561845097705367514537362276738361640706905260258712270067594924227529278340792924388109991888667100225366793676787437142171842316800192904433188293684588081343947334466698548098335811005579264.0000\n", "Iteration 140/1000, Loss: 1403781616957526644222073539138697844790552633423974961678795099873003699356953090195748719835087252161812419294352046380072566548105055206635795549256547122824926838668022676815876192032199700574765278676161085887741952.0000\n", "Iteration 141/1000, Loss: 
51094533443582058044350628148989324509306089448630829530903282264358601337499305619597201309704062953278669078598419542612542946936603249539337541030752322803321155363026636493153448705003506002474805239923076756079116288.0000\n", "Iteration 142/1000, Loss: 1859727550411649349326043802630024666480011932868952765422289566830289376176064630302975168274945987294359729251789562504978358590313085180628184839808627836719739049734181576431133824894553824664093890423362749197784711168.0000\n", "Iteration 143/1000, Loss: 67689952890538528698798657764592752927205454043451633378219692808623934973131915065942127120767425517807527401710140769139102281751309385163050044048197022283747295043797591028335231646024859394350247003738489562662453641216.0000\n", "Iteration 144/1000, Loss: 2463763964409258029963911936978429144933605292474852724601800337061313022035789014676479435321855106099652674260964452815453554612489657004625019715836126412638458304880805481804251734087722194128994908612998654147783022346240.0000\n", "Iteration 145/1000, Loss: 89675536960967331090133051528993936619318772034620575679012044630784893897329331232527468920260596055151139055718205910867424485910456896626102667915269684996510183858935805142636170069743946540519835629282627767975752381759488.0000\n", "Iteration 146/1000, Loss: 3263990400625083015804936783051335209668894641234916945878707504199198944091365201016133980648304472775536547061116245953013388139557932514416924258989935366412035187378654623340150204149249130244477018924855047186116047198486528.0000\n", "Iteration 147/1000, Loss: 118802002155948589511765243849446527797776903443598912532428051539629558517212294435416120418207642316031284899595713001179653645258345295557324839378315019141011202489437572808962978756128290968991906293794362473374072871871053824.0000\n", "Iteration 148/1000, Loss: 4324129051837614716156818856649167107325925050565870129782835871942365798283789735065715471840066869383069791745699330091621182071893795702125638270086882078683778190262066988785732419302790842212307019760650963649493622910168858624.0000\n", "Iteration 149/1000, Loss: 157388694783119238989831449475105757409009196486128937859609087264633181632740355429326853164618960189476520605784381650114138437055293401369871887119686923312440155426833721632706855062645255527067126221096725096760974145098807771136.0000\n", "Iteration 150/1000, Loss: 5728598973013284890920216622460656897565487287673719727635616832759982179717484340416805515984752971533894997817934655029927184801375315208005513589304540589718427367900653385847070792395596848143810903816065300558923027573512431730688.0000\n", "Iteration 151/1000, Loss: 208508280971707009713973971250748993025273880128628282012503536407765640084610694019220708504088878315911473507201540001729363133912949331760968560909873149743937121400278850872777272552463676859647019854074261374741619163725908322287616.0000\n", "Iteration 152/1000, Loss: 7589238387707869597040825333511335595229040860432817008263734235416695551784974938958301958238860936248341618953916475291959064885082787901066420216268576075893031625666665030527954184497826816617974356942189405264717707593928976476667904.0000\n", "Iteration 153/1000, Loss: 276231423697144088073116916214807236733715785335216256021081231666759991393389696387043360966283642604380279910029465252034231496132659115615462752182055142429832811449900899438154024899888385099491727978746802794370118263188069538317991936.0000\n", "Iteration 154/1000, Loss: 
10054210388401925437026865478536075174546727641402064495614468631382558271652601275672390588011539696816363547091987649415701682154027287914089041254854818908666817904842799826661334904224093423669338207686779153661877879178631795377390485504.0000\n", "Iteration 155/1000, Loss: 365950930496161022737205748512007819096486467600514197979759537643383557808502011029396394435009921235928643487834144049300649354525030205693310378626731179597640892792096483690637756107480925200404840181199183438754447412951078119184244146176.0000\n", "Iteration 156/1000, Loss: 13319801193487072615583867006516368161464806248714622531504971532153317266657818804791716150588370344734153272940416486553767671172713524141059616590084580893075473634881488850611134043750653152707403536707249846580777318431594488921092415029248.0000\n", "Iteration 157/1000, Loss: 484811183820396843242025810621893242599097309715474000746584920082223434631351501288247690817639970150884295059281309767214798756174653538957682933515416221869236592181423353926806723391937423462114262296770525173392705127596802456738923414552576.0000\n", "Iteration 158/1000, Loss: 17646050458490482276358110932717209431174220909307227822728222948905372538119920739806745734408502017297047923477027199426530510388155277423524742487370161308650141693119817300409478326042874213144537183859085726348029738851171418265361602513469440.0000\n", "Iteration 159/1000, Loss: 642277049654339499162964242778254611861273636830615166140646523566940367269553796854710544910331719387671699848740536747939247224946295455751048865203916466516188911167243461589177150321384623416175621431997554925633811533439895478171093729915437056.0000\n", "Iteration 160/1000, Loss: 23377458286378020224161592417656052199636571543215637870326874357083343459754098657351093486427499628230978322547795570879985455931440375090834590729854973621765852512697985900679060898447667764717375157150448748682800379729684608733421783249231806464.0000\n", "Iteration 161/1000, Loss: 850887566705773653805779404673582390854469639496420301269175056173011647395123036899720756336554132619198764566789587663458301220892250626466830503327141740761242780259927410383427867664200274548106363176489310508972519536943104634734865018219055284224.0000\n", "Iteration 162/1000, Loss: 30970417840349683548701522346636760346935284396769320146294333641804337377355361855402739027625978705115605603465090681771418297554373933702215013634172337947649244416394577400058507725878802981046975067263588677251834827705311062983342911271532457099264.0000\n", "Iteration 163/1000, Loss: 1127254432591230882488975374039376725328811122228554008963125808477998897925298975996914356377704302055276543349442490708970786234796792637099483211239541065732758561284864442419621202980199675366006374169708904443453536068479450615781498701077170475761664.0000\n", "Iteration 164/1000, Loss: 41029558023626284635942375811182974887037784184754012712037172696728307827525820993510483108218423721847655252013730007048334803998368118904970454183868488977421102458403544741448883484938283196132310682234798585524589345844588762865460620331248837329944576.0000\n", "Iteration 165/1000, Loss: 1493384796673108745644517026625548444709623758751979424554756355950971082326351663118771538660387272609303814325880991426303019229317072978948946778780509631560015752849519170735657048353837308164457335894421300431057304860291894885355492164241490562550595584.0000\n", "Iteration 166/1000, Loss: 
54355890201160692604053300538973736903238776780364867271782950993366024776805231661856843299795839130180502572043525245317670720980997584685710677596818081203017684900437974373126517570927706903013549772421027854291320399667456382664390776317966275241253535744.0000\n", "Iteration 167/1000, Loss: 1978433693809306093344463208231648914481336837621803203321961768125311685233760510720352530971064833418350757210528074424739570179778836856596166770204242436633047849280541510355476076056288157216669008099365870852815825387072778581213885737063345750167431151616.0000\n", "Iteration 168/1000, Loss: 72010592896450298130684588938676955857528948786403582412696940463948476870909741696401003114112179001777305686374916746350515775747412418548194640988793211979247043862746412601240884791346683114669630577764908145393835723943016548451561812516193130486673788895232.0000\n", "Iteration 169/1000, Loss: 2621025665668890168090195037453608911090464979695590058441958240140236262483053749430573869492316230552007293850939352658597066622423754493528513476289869723425920786034982076197454800413566605607312688983904310289102870704051267636646425635603507226366831543451648.0000\n", "Iteration 170/1000, Loss: 95399513651743429489634933826585274848113028297568453423139097506173516812871183567635367983235308773202895078176492372514292319958319959667360293439719566096447239535946852292868302547965373115432778800835915054619886584421560010573042999570330498896689378162114560.0000\n", "Iteration 171/1000, Loss: 3472330440788176099086036730771204130074449733027891629167306968069942097507529153084851151804100138704058150126497308605991194141492820552139108215536928375821050956244162441117797031357383655937620925798184848839665075382741926463146057481704343918293677294730870784.0000\n", "Iteration 172/1000, Loss: 126385116951839601939011424081581206794657101155271431500522862695462485529077224268316831099790214334496419386824652783897048457860812242419785371858335819999211386017892013996870769127946004685467185166023838952964062134093925119177964896818554934323004949407956205568.0000\n", "Iteration 173/1000, Loss: 4600137590391442244976460269472144790945890521914632401680326213689610620715573200905704201568679191520976614275724996224570895098710580417818597096968885443242326811630297992135629592448241750631390081771482718032247234913273973907362873218334313246940978586773729837056.0000\n", "Iteration 174/1000, Loss: 167434792647271175223134151102122416451304098740562487670891424675618725672988871148052899096606643460496840745753224256161150010948456870174919211943051693871118357744807502463523838632492643707437107496184947411885681337875089669460191007301681807259005681929818478739456.0000\n", "Iteration 175/1000, Loss: 6094254625642435063549443039987067664099994302850529942793499962389782513437099215404693122122182063794175788607586898379399387607787882391081168201178806203916535709592086993098585426959322427623825504174980463531933513397116141399427623355388870456371882865020627333414912.0000\n", "Iteration 176/1000, Loss: 221817334706565956863257901044821366698865477071475496830678052959818217042939990120091202210759149341564337759328236369173960884954659870085363950700409112133607274916472896627402311240007127592607664605415860720999526271160248831765723118165736829863843873515557356040093696.0000\n", "Iteration 177/1000, Loss: 
8073658387901359280838466315015271652719029031594009836857684184497225375215138760030729310460094205561664987916716932747457308086069026456253026615305795415728455028278637926639677187085126666526971735909802027813971791958086659345350774570483915625085601824267031637139652608.0000\n", "Iteration 178/1000, Loss: 293863235940326626186798943780880030514184303557066182250698566828573860706046602076281353246059631814392810944110443276862569626220880958959754897601760687400290648773945131476704353229097755523196035789502278510940292956355807896740427803356907219641142907730408470519465639936.0000\n", "Iteration 179/1000, Loss: 10695969198638225057905197893949192142224048769152297428425094557449430908522826021369438522512932890014583205770731429007118642298502610644166189850326792613789765610813649614914267399031704128379251155607694785354350390583741175785364345710686964692596819682062963153044009648128.0000\n", "Iteration 180/1000, Loss: 389309526018589836926975605093034647604534716410758757480346446458504714056478108774146455776474887988721926871677280248932707304821512756512348568645644248177016039026181271614830256635165194742125103751638895443917277762610851630662213678256011786821192690369839876906923035983872.0000\n", "Iteration 181/1000, Loss: 14170002197474112909162487116726632782555662968366451838344311478834310425752894971229778044864849049286372409031839396540745945370410316291383805376608516648119522993921635311584343395715973297571479602227565073974552927849169234091534547204933929534089261759242698796182345753821184.0000\n", "Iteration 182/1000, Loss: 515756612302451980593333750113983150787087243528948760068272587677657642166673913959810068489902794891166442324043809309032927197457585504109411451243778571399421315454749664415685890144119730900207547757037485791183946043953324803259347481160735857284725989171129477033485583020720128.0000\n", "Iteration 183/1000, Loss: 18772395333934302619104307180006426782298272768356397141324712081995961660643278726575443972837837228147772947360466133402319819717583564330037438123677985412758670130642405106594593850122780079754191943692319192536063632226623343562858484087106732616289036558060142743980575965786931200.0000\n", "Iteration 184/1000, Loss: 683273501817677059952634283047481084230933863715676420234899891887650507493626713992599316054530183619816740894358300464509024743721989640392610974795207465573834549784659840803970643534693567018128181947522577893953554475815429436246479268600434385554452894818178440938289137452954157056.0000\n", "Iteration 185/1000, Loss: 24869638103255636174727976899623537794964611054349374132298136692178389558874996581574832687543334686980715086113759655381790147918979693871349295678302839332628952283668622780544447228571723502189654335422431118864882464840233239035726542667145466557779968679863326809339700447799232430080.0000\n", "Iteration 186/1000, Loss: 905199598318308168408906783581365375393080369314397095202338126723510758949557171084249435917390066182728720087972995513262072711431293723912417055018940767918858341548099359631455523516318266491856630654484636906242620079658924042631009507407276828739609111045708448300472061612799907856384.0000\n", "Iteration 187/1000, Loss: 32947255178930882316182380886565105647036207384721006393976804473157543190357033663458034328514147788482096359097699906334528192116512926782714513932378967487811118956795272582480702388575712791363478886896882457204185246652115928538133826620977626608841634130170784204539868612083397519474688.0000\n", "Iteration 188/1000, Loss: 
1199206921702445204154123042164411165552751567012515180446142265144969538490140808400645630091891971907718882050918027981505239015051669692815712216180684422117926905757062975864008125998071067169427414431342555547274571041193286730093563661234345308937733950341908476089094195795801873391288320.0000\n", "Iteration 189/1000, Loss: 43648468840545152589764157826491884908612868173564537033418284861914559227673285882904667619075474554790649988537710300739386839324629563935676808522084685084940230340608483172471818311656834103993489196153487899784675755981056175902487754635531968141868065548598889334836797414750553457045798912.0000\n", "Iteration 190/1000, Loss: 1588707334526850324309822147017463875944277458932836570091113235229717291365372780295153493093910686186826531183398163253192618224713750332013201163948209015181434989514096667837063310231346829453513216852410205083378245396084133276071055145388600397156512906378731533948351763054471999571527467008.0000\n", "Iteration 191/1000, Loss: 57825418893843729454311882391300627780257934236385991840537656597773045119553066662164545385911255095113922693357486640393026878446599335787516159656777596412198361610357366190683283028475817545459288241310230426792359676291820674371858515678639040134978745967117812659261221149358664364310654877696.0000\n", "Iteration 192/1000, Loss: 2104716833352031375238214432645049068299858860089308560004655190604321294904690081222088985214392751292989323373717629454370499983612113255903118677796212380930051219514010974901972384426838725075930649571846769914123111100753496330999038715451473307534167485737813304422315439387776653438141151051776.0000\n", "Iteration 193/1000, Loss: 76607018735613830582841598565894301752016021834028271541456736686973538592759292140117545111966270235444279500052547856567615380085455014996125631965538233811786144547914128184785333281341829935693121592206802648204484200380475835677771036908885466801628943222267158447104176383696752649367081946447872.0000\n", "Iteration 194/1000, Loss: 2788325358814246001812974657729798014050546230572306627309427493623837325931504170832242886928267950041707919427233465372096408168793095926282760048754088578633423095308465662430910785658614663878767429128827839832108752267560464972497178762715830813569890909051878874513484695499236345907048046598815744.0000\n", "Iteration 195/1000, Loss: inf\n", "Iteration 196/1000, Loss: inf\n", "Iteration 197/1000, Loss: inf\n", "Iteration 198/1000, Loss: inf\n", "Iteration 199/1000, Loss: inf\n", "Iteration 200/1000, Loss: inf\n", "Iteration 201/1000, Loss: inf\n", "Iteration 202/1000, Loss: inf\n", "Iteration 203/1000, Loss: inf\n", "Iteration 204/1000, Loss: inf\n", "Iteration 205/1000, Loss: inf\n", "Iteration 206/1000, Loss: inf\n", "Iteration 207/1000, Loss: inf\n", "Iteration 208/1000, Loss: inf\n", "Iteration 209/1000, Loss: inf\n", "Iteration 210/1000, Loss: inf\n", "Iteration 211/1000, Loss: inf\n", "Iteration 212/1000, Loss: inf\n", "Iteration 213/1000, Loss: inf\n", "Iteration 214/1000, Loss: inf\n", "Iteration 215/1000, Loss: inf\n", "Iteration 216/1000, Loss: inf\n", "Iteration 217/1000, Loss: inf\n", "Iteration 218/1000, Loss: inf\n", "Iteration 219/1000, Loss: inf\n", "Iteration 220/1000, Loss: inf\n", "Iteration 221/1000, Loss: inf\n", "Iteration 222/1000, Loss: inf\n", "Iteration 223/1000, Loss: inf\n", "Iteration 224/1000, Loss: inf\n", "Iteration 225/1000, Loss: inf\n", "Iteration 226/1000, Loss: inf\n", "Iteration 227/1000, Loss: inf\n", "Iteration 228/1000, Loss: inf\n", "Iteration 229/1000, Loss: inf\n", 
"Iteration 230/1000, Loss: inf\n", "Iteration 231/1000, Loss: inf\n", "Iteration 232/1000, Loss: inf\n", "Iteration 233/1000, Loss: inf\n", "Iteration 234/1000, Loss: inf\n", "Iteration 235/1000, Loss: inf\n", "Iteration 236/1000, Loss: inf\n", "Iteration 237/1000, Loss: inf\n", "Iteration 238/1000, Loss: inf\n", "Iteration 239/1000, Loss: inf\n", "Iteration 240/1000, Loss: inf\n", "Iteration 241/1000, Loss: inf\n", "Iteration 242/1000, Loss: inf\n", "Iteration 243/1000, Loss: inf\n", "Iteration 244/1000, Loss: inf\n", "Iteration 245/1000, Loss: inf\n", "Iteration 246/1000, Loss: inf\n", "Iteration 247/1000, Loss: inf\n", "Iteration 248/1000, Loss: inf\n", "Iteration 249/1000, Loss: inf\n", "Iteration 250/1000, Loss: inf\n", "Iteration 251/1000, Loss: inf\n", "Iteration 252/1000, Loss: inf\n", "Iteration 253/1000, Loss: inf\n", "Iteration 254/1000, Loss: inf\n", "Iteration 255/1000, Loss: inf\n", "Iteration 256/1000, Loss: inf\n", "Iteration 257/1000, Loss: inf\n", "Iteration 258/1000, Loss: inf\n", "Iteration 259/1000, Loss: inf\n", "Iteration 260/1000, Loss: inf\n", "Iteration 261/1000, Loss: inf\n", "Iteration 262/1000, Loss: inf\n", "Iteration 263/1000, Loss: inf\n", "Iteration 264/1000, Loss: inf\n", "Iteration 265/1000, Loss: inf\n", "Iteration 266/1000, Loss: inf\n", "Iteration 267/1000, Loss: inf\n", "Iteration 268/1000, Loss: inf\n", "Iteration 269/1000, Loss: inf\n", "Iteration 270/1000, Loss: inf\n", "Iteration 271/1000, Loss: inf\n", "Iteration 272/1000, Loss: inf\n", "Iteration 273/1000, Loss: inf\n", "Iteration 274/1000, Loss: inf\n", "Iteration 275/1000, Loss: inf\n", "Iteration 276/1000, Loss: inf\n", "Iteration 277/1000, Loss: inf\n", "Iteration 278/1000, Loss: inf\n", "Iteration 279/1000, Loss: inf\n", "Iteration 280/1000, Loss: inf\n", "Iteration 281/1000, Loss: inf\n", "Iteration 282/1000, Loss: inf\n", "Iteration 283/1000, Loss: inf\n", "Iteration 284/1000, Loss: inf\n", "Iteration 285/1000, Loss: inf\n", "Iteration 286/1000, Loss: inf\n", "Iteration 287/1000, Loss: inf\n", "Iteration 288/1000, Loss: inf\n", "Iteration 289/1000, Loss: inf\n", "Iteration 290/1000, Loss: inf\n", "Iteration 291/1000, Loss: inf\n", "Iteration 292/1000, Loss: inf\n", "Iteration 293/1000, Loss: inf\n", "Iteration 294/1000, Loss: inf\n", "Iteration 295/1000, Loss: inf\n", "Iteration 296/1000, Loss: inf\n", "Iteration 297/1000, Loss: inf\n", "Iteration 298/1000, Loss: inf\n", "Iteration 299/1000, Loss: inf\n", "Iteration 300/1000, Loss: inf\n", "Iteration 301/1000, Loss: inf\n", "Iteration 302/1000, Loss: inf\n", "Iteration 303/1000, Loss: inf\n", "Iteration 304/1000, Loss: inf\n", "Iteration 305/1000, Loss: inf\n", "Iteration 306/1000, Loss: inf\n", "Iteration 307/1000, Loss: inf\n", "Iteration 308/1000, Loss: inf\n", "Iteration 309/1000, Loss: inf\n", "Iteration 310/1000, Loss: inf\n", "Iteration 311/1000, Loss: inf\n", "Iteration 312/1000, Loss: inf\n", "Iteration 313/1000, Loss: inf\n", "Iteration 314/1000, Loss: inf\n", "Iteration 315/1000, Loss: inf\n", "Iteration 316/1000, Loss: inf\n", "Iteration 317/1000, Loss: inf\n", "Iteration 318/1000, Loss: inf\n", "Iteration 319/1000, Loss: inf\n", "Iteration 320/1000, Loss: inf\n", "Iteration 321/1000, Loss: inf\n", "Iteration 322/1000, Loss: inf\n", "Iteration 323/1000, Loss: inf\n", "Iteration 324/1000, Loss: inf\n", "Iteration 325/1000, Loss: inf\n", "Iteration 326/1000, Loss: inf\n", "Iteration 327/1000, Loss: inf\n", "Iteration 328/1000, Loss: inf\n", "Iteration 329/1000, Loss: inf\n", "Iteration 330/1000, Loss: inf\n", "Iteration 
331/1000, Loss: inf\n", "Iteration 332/1000, Loss: inf\n", "Iteration 333/1000, Loss: inf\n", "Iteration 334/1000, Loss: inf\n", "Iteration 335/1000, Loss: inf\n", "Iteration 336/1000, Loss: inf\n", "Iteration 337/1000, Loss: inf\n", "Iteration 338/1000, Loss: inf\n", "Iteration 339/1000, Loss: inf\n", "Iteration 340/1000, Loss: inf\n", "Iteration 341/1000, Loss: inf\n", "Iteration 342/1000, Loss: inf\n", "Iteration 343/1000, Loss: inf\n", "Iteration 344/1000, Loss: inf\n", "Iteration 345/1000, Loss: inf\n", "Iteration 346/1000, Loss: inf\n", "Iteration 347/1000, Loss: inf\n", "Iteration 348/1000, Loss: inf\n", "Iteration 349/1000, Loss: inf\n", "Iteration 350/1000, Loss: inf\n", "Iteration 351/1000, Loss: inf\n", "Iteration 352/1000, Loss: inf\n", "Iteration 353/1000, Loss: inf\n", "Iteration 354/1000, Loss: inf\n", "Iteration 355/1000, Loss: inf\n", "Iteration 356/1000, Loss: inf\n", "Iteration 357/1000, Loss: inf\n", "Iteration 358/1000, Loss: inf\n", "Iteration 359/1000, Loss: inf\n", "Iteration 360/1000, Loss: inf\n", "Iteration 361/1000, Loss: inf\n", "Iteration 362/1000, Loss: inf\n", "Iteration 363/1000, Loss: inf\n", "Iteration 364/1000, Loss: inf\n", "Iteration 365/1000, Loss: inf\n", "Iteration 366/1000, Loss: inf\n", "Iteration 367/1000, Loss: inf\n", "Iteration 368/1000, Loss: inf\n", "Iteration 369/1000, Loss: inf\n", "Iteration 370/1000, Loss: inf\n", "Iteration 371/1000, Loss: inf\n", "Iteration 372/1000, Loss: inf\n", "Iteration 373/1000, Loss: inf\n", "Iteration 374/1000, Loss: inf\n", "Iteration 375/1000, Loss: inf\n", "Iteration 376/1000, Loss: inf\n", "Iteration 377/1000, Loss: inf\n", "Iteration 378/1000, Loss: inf\n", "Iteration 379/1000, Loss: inf\n", "Iteration 380/1000, Loss: inf\n", "Iteration 381/1000, Loss: inf\n", "Iteration 382/1000, Loss: inf\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "c:\\Users\\pthomas\\AppData\\Local\\Programs\\Python\\Python311\\Lib\\site-packages\\numpy\\core\\_methods.py:49: RuntimeWarning: overflow encountered in reduce\n", " return umr_sum(a, axis, dtype, out, keepdims, initial, where)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "Iteration 383/1000, Loss: inf\n", "Iteration 384/1000, Loss: inf\n", "Iteration 385/1000, Loss: inf\n", "Iteration 386/1000, Loss: inf\n", "Iteration 387/1000, Loss: inf\n", "Iteration 388/1000, Loss: inf\n", "Iteration 389/1000, Loss: inf\n", "Iteration 390/1000, Loss: inf\n", "Iteration 391/1000, Loss: nan\n", "Iteration 392/1000, Loss: nan\n", "Iteration 393/1000, Loss: nan\n", "Iteration 394/1000, Loss: nan\n", "Iteration 395/1000, Loss: nan\n", "Iteration 396/1000, Loss: nan\n", "Iteration 397/1000, Loss: nan\n", "Iteration 398/1000, Loss: nan\n", "Iteration 399/1000, Loss: nan\n", "Iteration 400/1000, Loss: nan\n", "Iteration 401/1000, Loss: nan\n", "Iteration 402/1000, Loss: nan\n", "Iteration 403/1000, Loss: nan\n", "Iteration 404/1000, Loss: nan\n", "Iteration 405/1000, Loss: nan\n", "Iteration 406/1000, Loss: nan\n", "Iteration 407/1000, Loss: nan\n", "Iteration 408/1000, Loss: nan\n", "Iteration 409/1000, Loss: nan\n", "Iteration 410/1000, Loss: nan\n", "Iteration 411/1000, Loss: nan\n", "Iteration 412/1000, Loss: nan\n", "Iteration 413/1000, Loss: nan\n", "Iteration 414/1000, Loss: nan\n", "Iteration 415/1000, Loss: nan\n", "Iteration 416/1000, Loss: nan\n", "Iteration 417/1000, Loss: nan\n", "Iteration 418/1000, Loss: nan\n", "Iteration 419/1000, Loss: nan\n", "Iteration 420/1000, Loss: nan\n", "Iteration 421/1000, Loss: nan\n", "Iteration 422/1000, Loss: 
nan\n", "Iteration 423/1000, Loss: nan\n", "Iteration 424/1000, Loss: nan\n", "Iteration 425/1000, Loss: nan\n", "Iteration 426/1000, Loss: nan\n", "Iteration 427/1000, Loss: nan\n", "Iteration 428/1000, Loss: nan\n", "Iteration 429/1000, Loss: nan\n", "Iteration 430/1000, Loss: nan\n", "Iteration 431/1000, Loss: nan\n", "Iteration 432/1000, Loss: nan\n", "Iteration 433/1000, Loss: nan\n", "Iteration 434/1000, Loss: nan\n", "Iteration 435/1000, Loss: nan\n", "Iteration 436/1000, Loss: nan\n", "Iteration 437/1000, Loss: nan\n", "Iteration 438/1000, Loss: nan\n", "Iteration 439/1000, Loss: nan\n", "Iteration 440/1000, Loss: nan\n", "Iteration 441/1000, Loss: nan\n", "Iteration 442/1000, Loss: nan\n", "Iteration 443/1000, Loss: nan\n", "Iteration 444/1000, Loss: nan\n", "Iteration 445/1000, Loss: nan\n", "Iteration 446/1000, Loss: nan\n", "Iteration 447/1000, Loss: nan\n", "Iteration 448/1000, Loss: nan\n", "Iteration 449/1000, Loss: nan\n", "Iteration 450/1000, Loss: nan\n", "Iteration 451/1000, Loss: nan\n", "Iteration 452/1000, Loss: nan\n", "Iteration 453/1000, Loss: nan\n", "Iteration 454/1000, Loss: nan\n", "Iteration 455/1000, Loss: nan\n", "Iteration 456/1000, Loss: nan\n", "Iteration 457/1000, Loss: nan\n", "Iteration 458/1000, Loss: nan\n", "Iteration 459/1000, Loss: nan\n", "Iteration 460/1000, Loss: nan\n", "Iteration 461/1000, Loss: nan\n", "Iteration 462/1000, Loss: nan\n", "Iteration 463/1000, Loss: nan\n", "Iteration 464/1000, Loss: nan\n", "Iteration 465/1000, Loss: nan\n", "Iteration 466/1000, Loss: nan\n", "Iteration 467/1000, Loss: nan\n", "Iteration 468/1000, Loss: nan\n", "Iteration 469/1000, Loss: nan\n", "Iteration 470/1000, Loss: nan\n", "Iteration 471/1000, Loss: nan\n", "Iteration 472/1000, Loss: nan\n", "Iteration 473/1000, Loss: nan\n", "Iteration 474/1000, Loss: nan\n", "Iteration 475/1000, Loss: nan\n", "Iteration 476/1000, Loss: nan\n", "Iteration 477/1000, Loss: nan\n", "Iteration 478/1000, Loss: nan\n", "Iteration 479/1000, Loss: nan\n", "Iteration 480/1000, Loss: nan\n", "Iteration 481/1000, Loss: nan\n", "Iteration 482/1000, Loss: nan\n", "Iteration 483/1000, Loss: nan\n", "Iteration 484/1000, Loss: nan\n", "Iteration 485/1000, Loss: nan\n", "Iteration 486/1000, Loss: nan\n", "Iteration 487/1000, Loss: nan\n", "Iteration 488/1000, Loss: nan\n", "Iteration 489/1000, Loss: nan\n", "Iteration 490/1000, Loss: nan\n", "Iteration 491/1000, Loss: nan\n", "Iteration 492/1000, Loss: nan\n", "Iteration 493/1000, Loss: nan\n", "Iteration 494/1000, Loss: nan\n", "Iteration 495/1000, Loss: nan\n", "Iteration 496/1000, Loss: nan\n", "Iteration 497/1000, Loss: nan\n", "Iteration 498/1000, Loss: nan\n", "Iteration 499/1000, Loss: nan\n", "Iteration 500/1000, Loss: nan\n", "Iteration 501/1000, Loss: nan\n", "Iteration 502/1000, Loss: nan\n", "Iteration 503/1000, Loss: nan\n", "Iteration 504/1000, Loss: nan\n", "Iteration 505/1000, Loss: nan\n", "Iteration 506/1000, Loss: nan\n", "Iteration 507/1000, Loss: nan\n", "Iteration 508/1000, Loss: nan\n", "Iteration 509/1000, Loss: nan\n", "Iteration 510/1000, Loss: nan\n", "Iteration 511/1000, Loss: nan\n", "Iteration 512/1000, Loss: nan\n", "Iteration 513/1000, Loss: nan\n", "Iteration 514/1000, Loss: nan\n", "Iteration 515/1000, Loss: nan\n", "Iteration 516/1000, Loss: nan\n", "Iteration 517/1000, Loss: nan\n", "Iteration 518/1000, Loss: nan\n", "Iteration 519/1000, Loss: nan\n", "Iteration 520/1000, Loss: nan\n", "Iteration 521/1000, Loss: nan\n", "Iteration 522/1000, Loss: nan\n", "Iteration 523/1000, Loss: nan\n", "Iteration 
524/1000, Loss: nan\n", "Iteration 525/1000, Loss: nan\n", "Iteration 526/1000, Loss: nan\n", "Iteration 527/1000, Loss: nan\n", "Iteration 528/1000, Loss: nan\n", "Iteration 529/1000, Loss: nan\n", "Iteration 530/1000, Loss: nan\n", "Iteration 531/1000, Loss: nan\n", "Iteration 532/1000, Loss: nan\n", "Iteration 533/1000, Loss: nan\n", "Iteration 534/1000, Loss: nan\n", "Iteration 535/1000, Loss: nan\n", "Iteration 536/1000, Loss: nan\n", "Iteration 537/1000, Loss: nan\n", "Iteration 538/1000, Loss: nan\n", "Iteration 539/1000, Loss: nan\n", "Iteration 540/1000, Loss: nan\n", "Iteration 541/1000, Loss: nan\n", "Iteration 542/1000, Loss: nan\n", "Iteration 543/1000, Loss: nan\n", "Iteration 544/1000, Loss: nan\n", "Iteration 545/1000, Loss: nan\n", "Iteration 546/1000, Loss: nan\n", "Iteration 547/1000, Loss: nan\n", "Iteration 548/1000, Loss: nan\n", "Iteration 549/1000, Loss: nan\n", "Iteration 550/1000, Loss: nan\n", "Iteration 551/1000, Loss: nan\n", "Iteration 552/1000, Loss: nan\n", "Iteration 553/1000, Loss: nan\n", "Iteration 554/1000, Loss: nan\n", "Iteration 555/1000, Loss: nan\n", "Iteration 556/1000, Loss: nan\n", "Iteration 557/1000, Loss: nan\n", "Iteration 558/1000, Loss: nan\n", "Iteration 559/1000, Loss: nan\n", "Iteration 560/1000, Loss: nan\n", "Iteration 561/1000, Loss: nan\n", "Iteration 562/1000, Loss: nan\n", "Iteration 563/1000, Loss: nan\n", "Iteration 564/1000, Loss: nan\n", "Iteration 565/1000, Loss: nan\n", "Iteration 566/1000, Loss: nan\n", "Iteration 567/1000, Loss: nan\n", "Iteration 568/1000, Loss: nan\n", "Iteration 569/1000, Loss: nan\n", "Iteration 570/1000, Loss: nan\n", "Iteration 571/1000, Loss: nan\n", "Iteration 572/1000, Loss: nan\n", "Iteration 573/1000, Loss: nan\n", "Iteration 574/1000, Loss: nan\n", "Iteration 575/1000, Loss: nan\n", "Iteration 576/1000, Loss: nan\n", "Iteration 577/1000, Loss: nan\n", "Iteration 578/1000, Loss: nan\n", "Iteration 579/1000, Loss: nan\n", "Iteration 580/1000, Loss: nan\n", "Iteration 581/1000, Loss: nan\n", "Iteration 582/1000, Loss: nan\n", "Iteration 583/1000, Loss: nan\n", "Iteration 584/1000, Loss: nan\n", "Iteration 585/1000, Loss: nan\n", "Iteration 586/1000, Loss: nan\n", "Iteration 587/1000, Loss: nan\n", "Iteration 588/1000, Loss: nan\n", "Iteration 589/1000, Loss: nan\n", "Iteration 590/1000, Loss: nan\n", "Iteration 591/1000, Loss: nan\n", "Iteration 592/1000, Loss: nan\n", "Iteration 593/1000, Loss: nan\n", "Iteration 594/1000, Loss: nan\n", "Iteration 595/1000, Loss: nan\n", "Iteration 596/1000, Loss: nan\n", "Iteration 597/1000, Loss: nan\n", "Iteration 598/1000, Loss: nan\n", "Iteration 599/1000, Loss: nan\n", "Iteration 600/1000, Loss: nan\n", "Iteration 601/1000, Loss: nan\n", "Iteration 602/1000, Loss: nan\n", "Iteration 603/1000, Loss: nan\n", "Iteration 604/1000, Loss: nan\n", "Iteration 605/1000, Loss: nan\n", "Iteration 606/1000, Loss: nan\n", "Iteration 607/1000, Loss: nan\n", "Iteration 608/1000, Loss: nan\n", "Iteration 609/1000, Loss: nan\n", "Iteration 610/1000, Loss: nan\n", "Iteration 611/1000, Loss: nan\n", "Iteration 612/1000, Loss: nan\n", "Iteration 613/1000, Loss: nan\n", "Iteration 614/1000, Loss: nan\n", "Iteration 615/1000, Loss: nan\n", "Iteration 616/1000, Loss: nan\n", "Iteration 617/1000, Loss: nan\n", "Iteration 618/1000, Loss: nan\n", "Iteration 619/1000, Loss: nan\n", "Iteration 620/1000, Loss: nan\n", "Iteration 621/1000, Loss: nan\n", "Iteration 622/1000, Loss: nan\n", "Iteration 623/1000, Loss: nan\n", "Iteration 624/1000, Loss: nan\n", "Iteration 625/1000, Loss: 
nan\n", "Iteration 626/1000, Loss: nan\n", "Iteration 627/1000, Loss: nan\n", "Iteration 628/1000, Loss: nan\n", "Iteration 629/1000, Loss: nan\n", "Iteration 630/1000, Loss: nan\n", "Iteration 631/1000, Loss: nan\n", "Iteration 632/1000, Loss: nan\n", "Iteration 633/1000, Loss: nan\n", "Iteration 634/1000, Loss: nan\n", "Iteration 635/1000, Loss: nan\n", "Iteration 636/1000, Loss: nan\n", "Iteration 637/1000, Loss: nan\n", "Iteration 638/1000, Loss: nan\n", "Iteration 639/1000, Loss: nan\n", "Iteration 640/1000, Loss: nan\n", "Iteration 641/1000, Loss: nan\n", "Iteration 642/1000, Loss: nan\n", "Iteration 643/1000, Loss: nan\n", "Iteration 644/1000, Loss: nan\n", "Iteration 645/1000, Loss: nan\n", "Iteration 646/1000, Loss: nan\n", "Iteration 647/1000, Loss: nan\n", "Iteration 648/1000, Loss: nan\n", "Iteration 649/1000, Loss: nan\n", "Iteration 650/1000, Loss: nan\n", "Iteration 651/1000, Loss: nan\n", "Iteration 652/1000, Loss: nan\n", "Iteration 653/1000, Loss: nan\n", "Iteration 654/1000, Loss: nan\n", "Iteration 655/1000, Loss: nan\n", "Iteration 656/1000, Loss: nan\n", "Iteration 657/1000, Loss: nan\n", "Iteration 658/1000, Loss: nan\n", "Iteration 659/1000, Loss: nan\n", "Iteration 660/1000, Loss: nan\n", "Iteration 661/1000, Loss: nan\n", "Iteration 662/1000, Loss: nan\n", "Iteration 663/1000, Loss: nan\n", "Iteration 664/1000, Loss: nan\n", "Iteration 665/1000, Loss: nan\n", "Iteration 666/1000, Loss: nan\n", "Iteration 667/1000, Loss: nan\n", "Iteration 668/1000, Loss: nan\n", "Iteration 669/1000, Loss: nan\n", "Iteration 670/1000, Loss: nan\n", "Iteration 671/1000, Loss: nan\n", "Iteration 672/1000, Loss: nan\n", "Iteration 673/1000, Loss: nan\n", "Iteration 674/1000, Loss: nan\n", "Iteration 675/1000, Loss: nan\n", "Iteration 676/1000, Loss: nan\n", "Iteration 677/1000, Loss: nan\n", "Iteration 678/1000, Loss: nan\n", "Iteration 679/1000, Loss: nan\n", "Iteration 680/1000, Loss: nan\n", "Iteration 681/1000, Loss: nan\n", "Iteration 682/1000, Loss: nan\n", "Iteration 683/1000, Loss: nan\n", "Iteration 684/1000, Loss: nan\n", "Iteration 685/1000, Loss: nan\n", "Iteration 686/1000, Loss: nan\n", "Iteration 687/1000, Loss: nan\n", "Iteration 688/1000, Loss: nan\n", "Iteration 689/1000, Loss: nan\n", "Iteration 690/1000, Loss: nan\n", "Iteration 691/1000, Loss: nan\n", "Iteration 692/1000, Loss: nan\n", "Iteration 693/1000, Loss: nan\n", "Iteration 694/1000, Loss: nan\n", "Iteration 695/1000, Loss: nan\n", "Iteration 696/1000, Loss: nan\n", "Iteration 697/1000, Loss: nan\n", "Iteration 698/1000, Loss: nan\n", "Iteration 699/1000, Loss: nan\n", "Iteration 700/1000, Loss: nan\n", "Iteration 701/1000, Loss: nan\n", "Iteration 702/1000, Loss: nan\n", "Iteration 703/1000, Loss: nan\n", "Iteration 704/1000, Loss: nan\n", "Iteration 705/1000, Loss: nan\n", "Iteration 706/1000, Loss: nan\n", "Iteration 707/1000, Loss: nan\n", "Iteration 708/1000, Loss: nan\n", "Iteration 709/1000, Loss: nan\n", "Iteration 710/1000, Loss: nan\n", "Iteration 711/1000, Loss: nan\n", "Iteration 712/1000, Loss: nan\n", "Iteration 713/1000, Loss: nan\n", "Iteration 714/1000, Loss: nan\n", "Iteration 715/1000, Loss: nan\n", "Iteration 716/1000, Loss: nan\n", "Iteration 717/1000, Loss: nan\n", "Iteration 718/1000, Loss: nan\n", "Iteration 719/1000, Loss: nan\n", "Iteration 720/1000, Loss: nan\n", "Iteration 721/1000, Loss: nan\n", "Iteration 722/1000, Loss: nan\n", "Iteration 723/1000, Loss: nan\n", "Iteration 724/1000, Loss: nan\n", "Iteration 725/1000, Loss: nan\n", "Iteration 726/1000, Loss: nan\n", "Iteration 
727/1000, Loss: nan\n", "Iteration 728/1000, Loss: nan\n", "Iteration 729/1000, Loss: nan\n", "Iteration 730/1000, Loss: nan\n", "Iteration 731/1000, Loss: nan\n", "Iteration 732/1000, Loss: nan\n", "Iteration 733/1000, Loss: nan\n", "Iteration 734/1000, Loss: nan\n", "Iteration 735/1000, Loss: nan\n", "Iteration 736/1000, Loss: nan\n", "Iteration 737/1000, Loss: nan\n", "Iteration 738/1000, Loss: nan\n", "Iteration 739/1000, Loss: nan\n", "Iteration 740/1000, Loss: nan\n", "Iteration 741/1000, Loss: nan\n", "Iteration 742/1000, Loss: nan\n", "Iteration 743/1000, Loss: nan\n", "Iteration 744/1000, Loss: nan\n", "Iteration 745/1000, Loss: nan\n", "Iteration 746/1000, Loss: nan\n", "Iteration 747/1000, Loss: nan\n", "Iteration 748/1000, Loss: nan\n", "Iteration 749/1000, Loss: nan\n", "Iteration 750/1000, Loss: nan\n", "Iteration 751/1000, Loss: nan\n", "Iteration 752/1000, Loss: nan\n", "Iteration 753/1000, Loss: nan\n", "Iteration 754/1000, Loss: nan\n", "Iteration 755/1000, Loss: nan\n", "Iteration 756/1000, Loss: nan\n", "Iteration 757/1000, Loss: nan\n", "Iteration 758/1000, Loss: nan\n", "Iteration 759/1000, Loss: nan\n", "Iteration 760/1000, Loss: nan\n", "Iteration 761/1000, Loss: nan\n", "Iteration 762/1000, Loss: nan\n", "Iteration 763/1000, Loss: nan\n", "Iteration 764/1000, Loss: nan\n", "Iteration 765/1000, Loss: nan\n", "Iteration 766/1000, Loss: nan\n", "Iteration 767/1000, Loss: nan\n", "Iteration 768/1000, Loss: nan\n", "Iteration 769/1000, Loss: nan\n", "Iteration 770/1000, Loss: nan\n", "Iteration 771/1000, Loss: nan\n", "Iteration 772/1000, Loss: nan\n", "Iteration 773/1000, Loss: nan\n", "Iteration 774/1000, Loss: nan\n", "Iteration 775/1000, Loss: nan\n", "Iteration 776/1000, Loss: nan\n", "Iteration 777/1000, Loss: nan\n", "Iteration 778/1000, Loss: nan\n", "Iteration 779/1000, Loss: nan\n", "Iteration 780/1000, Loss: nan\n", "Iteration 781/1000, Loss: nan\n", "Iteration 782/1000, Loss: nan\n", "Iteration 783/1000, Loss: nan\n", "Iteration 784/1000, Loss: nan\n", "Iteration 785/1000, Loss: nan\n", "Iteration 786/1000, Loss: nan\n", "Iteration 787/1000, Loss: nan\n", "Iteration 788/1000, Loss: nan\n", "Iteration 789/1000, Loss: nan\n", "Iteration 790/1000, Loss: nan\n", "Iteration 791/1000, Loss: nan\n", "Iteration 792/1000, Loss: nan\n", "Iteration 793/1000, Loss: nan\n", "Iteration 794/1000, Loss: nan\n", "Iteration 795/1000, Loss: nan\n", "Iteration 796/1000, Loss: nan\n", "Iteration 797/1000, Loss: nan\n", "Iteration 798/1000, Loss: nan\n", "Iteration 799/1000, Loss: nan\n", "Iteration 800/1000, Loss: nan\n", "Iteration 801/1000, Loss: nan\n", "Iteration 802/1000, Loss: nan\n", "Iteration 803/1000, Loss: nan\n", "Iteration 804/1000, Loss: nan\n", "Iteration 805/1000, Loss: nan\n", "Iteration 806/1000, Loss: nan\n", "Iteration 807/1000, Loss: nan\n", "Iteration 808/1000, Loss: nan\n", "Iteration 809/1000, Loss: nan\n", "Iteration 810/1000, Loss: nan\n", "Iteration 811/1000, Loss: nan\n", "Iteration 812/1000, Loss: nan\n", "Iteration 813/1000, Loss: nan\n", "Iteration 814/1000, Loss: nan\n", "Iteration 815/1000, Loss: nan\n", "Iteration 816/1000, Loss: nan\n", "Iteration 817/1000, Loss: nan\n", "Iteration 818/1000, Loss: nan\n", "Iteration 819/1000, Loss: nan\n", "Iteration 820/1000, Loss: nan\n", "Iteration 821/1000, Loss: nan\n", "Iteration 822/1000, Loss: nan\n", "Iteration 823/1000, Loss: nan\n", "Iteration 824/1000, Loss: nan\n", "Iteration 825/1000, Loss: nan\n", "Iteration 826/1000, Loss: nan\n", "Iteration 827/1000, Loss: nan\n", "Iteration 828/1000, Loss: 
nan\n", "Iteration 829/1000, Loss: nan\n", "Iteration 830/1000, Loss: nan\n", "Iteration 831/1000, Loss: nan\n", "Iteration 832/1000, Loss: nan\n", "Iteration 833/1000, Loss: nan\n", "Iteration 834/1000, Loss: nan\n", "Iteration 835/1000, Loss: nan\n", "Iteration 836/1000, Loss: nan\n", "Iteration 837/1000, Loss: nan\n", "Iteration 838/1000, Loss: nan\n", "Iteration 839/1000, Loss: nan\n", "Iteration 840/1000, Loss: nan\n", "Iteration 841/1000, Loss: nan\n", "Iteration 842/1000, Loss: nan\n", "Iteration 843/1000, Loss: nan\n", "Iteration 844/1000, Loss: nan\n", "Iteration 845/1000, Loss: nan\n", "Iteration 846/1000, Loss: nan\n", "Iteration 847/1000, Loss: nan\n", "Iteration 848/1000, Loss: nan\n", "Iteration 849/1000, Loss: nan\n", "Iteration 850/1000, Loss: nan\n", "Iteration 851/1000, Loss: nan\n", "Iteration 852/1000, Loss: nan\n", "Iteration 853/1000, Loss: nan\n", "Iteration 854/1000, Loss: nan\n", "Iteration 855/1000, Loss: nan\n", "Iteration 856/1000, Loss: nan\n", "Iteration 857/1000, Loss: nan\n", "Iteration 858/1000, Loss: nan\n", "Iteration 859/1000, Loss: nan\n", "Iteration 860/1000, Loss: nan\n", "Iteration 861/1000, Loss: nan\n", "Iteration 862/1000, Loss: nan\n", "Iteration 863/1000, Loss: nan\n", "Iteration 864/1000, Loss: nan\n", "Iteration 865/1000, Loss: nan\n", "Iteration 866/1000, Loss: nan\n", "Iteration 867/1000, Loss: nan\n", "Iteration 868/1000, Loss: nan\n", "Iteration 869/1000, Loss: nan\n", "Iteration 870/1000, Loss: nan\n", "Iteration 871/1000, Loss: nan\n", "Iteration 872/1000, Loss: nan\n", "Iteration 873/1000, Loss: nan\n", "Iteration 874/1000, Loss: nan\n", "Iteration 875/1000, Loss: nan\n", "Iteration 876/1000, Loss: nan\n", "Iteration 877/1000, Loss: nan\n", "Iteration 878/1000, Loss: nan\n", "Iteration 879/1000, Loss: nan\n", "Iteration 880/1000, Loss: nan\n", "Iteration 881/1000, Loss: nan\n", "Iteration 882/1000, Loss: nan\n", "Iteration 883/1000, Loss: nan\n", "Iteration 884/1000, Loss: nan\n", "Iteration 885/1000, Loss: nan\n", "Iteration 886/1000, Loss: nan\n", "Iteration 887/1000, Loss: nan\n", "Iteration 888/1000, Loss: nan\n", "Iteration 889/1000, Loss: nan\n", "Iteration 890/1000, Loss: nan\n", "Iteration 891/1000, Loss: nan\n", "Iteration 892/1000, Loss: nan\n", "Iteration 893/1000, Loss: nan\n", "Iteration 894/1000, Loss: nan\n", "Iteration 895/1000, Loss: nan\n", "Iteration 896/1000, Loss: nan\n", "Iteration 897/1000, Loss: nan\n", "Iteration 898/1000, Loss: nan\n", "Iteration 899/1000, Loss: nan\n", "Iteration 900/1000, Loss: nan\n", "Iteration 901/1000, Loss: nan\n", "Iteration 902/1000, Loss: nan\n", "Iteration 903/1000, Loss: nan\n", "Iteration 904/1000, Loss: nan\n", "Iteration 905/1000, Loss: nan\n", "Iteration 906/1000, Loss: nan\n", "Iteration 907/1000, Loss: nan\n", "Iteration 908/1000, Loss: nan\n", "Iteration 909/1000, Loss: nan\n", "Iteration 910/1000, Loss: nan\n", "Iteration 911/1000, Loss: nan\n", "Iteration 912/1000, Loss: nan\n", "Iteration 913/1000, Loss: nan\n", "Iteration 914/1000, Loss: nan\n", "Iteration 915/1000, Loss: nan\n", "Iteration 916/1000, Loss: nan\n", "Iteration 917/1000, Loss: nan\n", "Iteration 918/1000, Loss: nan\n", "Iteration 919/1000, Loss: nan\n", "Iteration 920/1000, Loss: nan\n", "Iteration 921/1000, Loss: nan\n", "Iteration 922/1000, Loss: nan\n", "Iteration 923/1000, Loss: nan\n", "Iteration 924/1000, Loss: nan\n", "Iteration 925/1000, Loss: nan\n", "Iteration 926/1000, Loss: nan\n", "Iteration 927/1000, Loss: nan\n", "Iteration 928/1000, Loss: nan\n", "Iteration 929/1000, Loss: nan\n", "Iteration 
930/1000, Loss: nan\n", "Iteration 931/1000, Loss: nan\n", "Iteration 932/1000, Loss: nan\n", "Iteration 933/1000, Loss: nan\n", "Iteration 934/1000, Loss: nan\n", "Iteration 935/1000, Loss: nan\n", "Iteration 936/1000, Loss: nan\n", "Iteration 937/1000, Loss: nan\n", "Iteration 938/1000, Loss: nan\n", "Iteration 939/1000, Loss: nan\n", "Iteration 940/1000, Loss: nan\n", "Iteration 941/1000, Loss: nan\n", "Iteration 942/1000, Loss: nan\n", "Iteration 943/1000, Loss: nan\n", "Iteration 944/1000, Loss: nan\n", "Iteration 945/1000, Loss: nan\n", "Iteration 946/1000, Loss: nan\n", "Iteration 947/1000, Loss: nan\n", "Iteration 948/1000, Loss: nan\n", "Iteration 949/1000, Loss: nan\n", "Iteration 950/1000, Loss: nan\n", "Iteration 951/1000, Loss: nan\n", "Iteration 952/1000, Loss: nan\n", "Iteration 953/1000, Loss: nan\n", "Iteration 954/1000, Loss: nan\n", "Iteration 955/1000, Loss: nan\n", "Iteration 956/1000, Loss: nan\n", "Iteration 957/1000, Loss: nan\n", "Iteration 958/1000, Loss: nan\n", "Iteration 959/1000, Loss: nan\n", "Iteration 960/1000, Loss: nan\n", "Iteration 961/1000, Loss: nan\n", "Iteration 962/1000, Loss: nan\n", "Iteration 963/1000, Loss: nan\n", "Iteration 964/1000, Loss: nan\n", "Iteration 965/1000, Loss: nan\n", "Iteration 966/1000, Loss: nan\n", "Iteration 967/1000, Loss: nan\n", "Iteration 968/1000, Loss: nan\n", "Iteration 969/1000, Loss: nan\n", "Iteration 970/1000, Loss: nan\n", "Iteration 971/1000, Loss: nan\n", "Iteration 972/1000, Loss: nan\n", "Iteration 973/1000, Loss: nan\n", "Iteration 974/1000, Loss: nan\n", "Iteration 975/1000, Loss: nan\n", "Iteration 976/1000, Loss: nan\n", "Iteration 977/1000, Loss: nan\n", "Iteration 978/1000, Loss: nan\n", "Iteration 979/1000, Loss: nan\n", "Iteration 980/1000, Loss: nan\n", "Iteration 981/1000, Loss: nan\n", "Iteration 982/1000, Loss: nan\n", "Iteration 983/1000, Loss: nan\n", "Iteration 984/1000, Loss: nan\n", "Iteration 985/1000, Loss: nan\n", "Iteration 986/1000, Loss: nan\n", "Iteration 987/1000, Loss: nan\n", "Iteration 988/1000, Loss: nan\n", "Iteration 989/1000, Loss: nan\n", "Iteration 990/1000, Loss: nan\n", "Iteration 991/1000, Loss: nan\n", "Iteration 992/1000, Loss: nan\n", "Iteration 993/1000, Loss: nan\n", "Iteration 994/1000, Loss: nan\n", "Iteration 995/1000, Loss: nan\n", "Iteration 996/1000, Loss: nan\n", "Iteration 997/1000, Loss: nan\n", "Iteration 998/1000, Loss: nan\n", "Iteration 999/1000, Loss: nan\n", "Iteration 1000/1000, Loss: nan\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "c:\\Users\\pthomas\\AppData\\Local\\Programs\\Python\\Python311\\Lib\\site-packages\\matplotlib\\scale.py:255: RuntimeWarning: overflow encountered in power\n", " return np.power(self.base, values)\n" ] }, { "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAlYAAAHHCAYAAAB9dxZkAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAABUPElEQVR4nO3dd1gU5/428HsB6bCEIggKKKICIhoUQowNiEgMiHqiUaOINQZjFD2Wc44tx+6xRYk1ijFq0NgSEwsi9oYCiV1UbCgoFhQslH3eP/KyP9cFZGEQF+/Pde11uc88M/OdHXb3dubZGZkQQoCIiIiIKkynqgsgIiIiqi4YrIiIiIgkwmBFREREJBEGKyIiIiKJMFgRERERSYTBioiIiEgiDFZEREREEmGwIiIiIpIIgxURERGRRBisqNrr27cvnJ2dVdpkMhkmTZpUJfUQSWHSpEmQyWRVXYYkrl27BplMhpiYGI3njYmJgUwmw7Vr1ySvi6g8GKyo0qSlpWHo0KFo0KABjI2NYWxsDHd3d0RGRuKvv/6q6vIq3bp16zB//vwy93d2doZMJoNMJoOOjg4sLCzg6emJQYMG4fjx45VXaBW6ffs2Jk2ahJSUlDL1L/oSPXnyZOUWVgmKwkPRQ1dXF46OjujcuXOZt5/KryiIFj2MjY3h6OiIkJAQrFq1Ci9evKjqEqvM06dPER0djfbt26NWrVowMzNDs2bNsHjxYhQWFlZ1eVpHr6oLoOpp+/bt6N69O/T09NCrVy94eXlBR0cHFy5cwObNm7F48WKkpaXBycmpSup79uwZ9PQq989/3bp1OHPmDIYPH17meZo2bYqRI0cCAJ48eYLz589j48aNWL58OUaMGIG5c+dWUrVV4/bt25g8eTKcnZ3RtGnTqi7njejRowc++eQTFBYW4vz581i8eDF27NiBY8eOvTOvwaucnJzw7Nkz1KhRo9LXtXjxYpiamuLFixdIT0/Hrl270K9fP8yfPx/bt29HnTp1Kr2Gt83Vq1fx9ddfIyAgAFFRUTA3N8euXbvw1Vdf4dixY1i9enVVl6hVGKxIcleuXMHnn38OJycnxMfHo1atWirTZ86cie+//x46OqUfMM3NzYWJiUml1GhoaFgpy60oBwcHfPHFFyptM2fORM+ePTFv3jy4urpiyJAhVVQdSeH9999X2cctW7ZEaGgoFi9ejKVLl1ZhZVVHJpO9sffkP/7xD1hbWyufT5gwAWvXrkWfPn3w2Wef4dixY2+kjiIKhQJ5eXlV+plkZ2eH06dPw8PDQ9k2ePBg9OvXD6tWrcL48eNRv379KqtP2/BUIElu1qxZyM3NxapVq9RCFQDo6elh2LBhKv8z7Nu3L0xNTXHlyhV88sknMDMzQ69evQAABw8exGeffQZHR0cYGBigTp06GDFiBJ49e6a27K1bt6Jx48YwNDRE48aNsWXLlmJrLG6MVXp6Ovr16wdbW1sYGBjAw8MDK1euVOmzb98+yGQybNiwAVOnTkXt2rVhaGiIgIAAXL58Wdmvbdu2+P3333H9+nXlqYdXx3mVlZGREdasWQNLS0tMnToVQgjlNIVCgfnz58PDwwOGhoawtbXF4MGD8fDhQ5VlnDx5EkFBQbC2toaRkRHq1q2Lfv36qfRRKBRYsGABPD09YWhoCBsbG3To0EHttNtPP/0Eb29vGBkZwdLSEp9//jlu3ryp0qdt27Zo3Lgxzp07h3bt2sHY2BgODg6YNWuWymvZokULAEBERITydSrPOJtXJScnIzg4GObm5jA1NUVAQIDaF2Z+fj4mT54MV1dXGBoawsrKCh999BHi4uKUfTIyMhAREYHatWvDwMAAtWrVQqdOnSQdz+Pv7w/g71PnRTZu3Kh8ja2trfHFF18gPT291OW0adMGXl5exU5r2LAhgoKCAPzfKcn//e9/WLZsGVxcXGBgYIAWLVogMTFRbd69e/eiVatWMDExgYWFBTp16oTz58+r9Ck6zXbp0iV88cUXkMvlsLGxwfjx4yGEwM2bN9GpUyeYm5vDzs4Oc+bMUZm/uDFWf/31F/r27Yt69erB0NAQdnZ26NevH+7fv1/q61AevXr1woABA3D8+HGV/Q8Ax48fR4cOHSCXy2FsbIw2bdrg8OHDasvYt28fmjdvDkNDQ7i4uGDp0qXFjoOTyWQYOnQo1q5dCw8PDxgYGGDnzp0AyvYZBAAvXrzAxIkTUb9+feVn4ujRo9VOZ2ZlZeHChQt4+vRpqdtvbW2tEqqKdO7cGQDU9jeVjkesSHLbt29H/fr14evrq9F8BQUFCAoKwkcffYT//e9/MDY2BvD3l8zTp08xZMgQWFlZ4cSJE1i4cCFu3bqFjRs3KuffvXs3unbtCnd3d0yfPh33799Xfim+TmZmJj744APlh56NjQ127NiB/v374/Hjx2qn82bMmAEdHR2MGjUK2dnZmDVrFnr16qUcC/Xvf/8b2dnZuHXrFubNmwcAMDU11ej1eJmpqSk6d+6MH374AefOnVN+CA4ePBgxMTGIiIjAsGHDkJaWhkWLFiE5ORmHDx9GjRo1cPfuXbRv3x42NjYYO3YsLCwscO3aNWzevFllHf3790dMTAyCg4MxYMAAFBQU4ODBgzh27BiaN28OAJg6dSrGjx+Pbt26YcCAAbh37x4WLlyI1q1bIzk5GRYWFsrlPXz4EB06dECXLl3QrVs3/PLLLxgzZgw8PT0RHBwMNzc3fPvtt5gwYQIGDRqEVq1aAQA+/PDDcr9OAHD27Fm0atUK5ubmGD16NGrUqIGlS5eibdu22L9/v/LvctKkSZg+fToGDBgAHx8fPH78GCdPnkRSUhI+/vhjAEDXrl1x9uxZfP3113B2dsbdu3cRFxeHGzdulDsov+rKlSsAACsrKwBQ7s8WLVpg+vTpyMzMxIIFC3D48GG11/hlvXv3xsCBA3HmzBk0btxY2Z6YmIhLly7hP//5j0r/devW4cmTJxg8eDBkMhlmzZqFLl264OrVq8pTcnv27EFwcDDq1auHSZMm4dmzZ1i4cCFatmyJpKQktdege/fucHNzw4wZM/D7779jypQpsLS0xNKlS+Hv74+ZM2di7dq1GDVqFFq0aIHWrVuX+LrExcXh6tWriIiIgJ2dHc6ePYtly5bh7NmzOHbsmOQD93v37o1ly5Zh9+7dyv2/d+9eBAcHw9vbGxMnToSOjg5WrVoFf39/HDx4ED4+PgD+DvIdOnRArVq1MHnyZBQWFuLbb7+FjY1Nsevau3cvNmzYgKFDh8La2hrOzs5l/gxSKBQIDQ3FoUOHMGjQILi5ueH06dOYN28eLl26hK1btyrXs2jRIkyePBkJCQlo27atxq9JRkYGAKgc4aMyEEQSys7OFgBEWFiY2rSHDx+Ke/fuKR9Pnz5VTgsPDxcAxNixY9Xme7lfkenTpwuZTCauX7+ubGvatKmoVauWePTokbJt9+7dAoBwcnJSmR+AmDhxovJ5//79Ra1atURWVpZKv88//1zI5XJlDQkJCQKAcHNzEy9evFD2W7BggQAgTp8+rWzr2LGj2n
pL4+TkJDp27Fji9Hnz5gkAYtu2bUIIIQ4ePCgAiLVr16r027lzp0r7li1bBACRmJhY4rL37t0rAIhhw4apTVMoFEIIIa5duyZ0dXXF1KlTVaafPn1a6OnpqbS3adNGABA//vijsu3FixfCzs5OdO3aVdmWmJgoAIhVq1aVWNvLVq1a9dptCQsLE/r6+uLKlSvKttu3bwszMzPRunVrZZuXl1epr/fDhw8FADF79uwy1fY6aWlpAoCYPHmyuHfvnsjIyBD79u0TzZo1EwDEpk2bRF5enqhZs6Zo3LixePbsmXLe7du3CwBiwoQJyraJEyeKlz/CHz16JAwNDcWYMWNU1jts2DBhYmIicnJyVOqwsrISDx48UPbbtm2bACB+++03ZVvTpk1FzZo1xf3795Vtf/75p9DR0RF9+vRRq2XQoEHKtoKCAlG7dm0hk8nEjBkzlO0PHz4URkZGIjw8XO21efnvoLj3/fr16wUAceDAAWVb0d9EWlqaWv+XFdV47969YqcX7e/OnTsLIf7+u3d1dRVBQUHK90BRXXXr1hUff/yxsi0kJEQYGxuL9PR0ZVtqaqrQ09MTr37NAhA6Ojri7NmzKu1l/Qxas2aN0NHREQcPHlTpt2TJEgFAHD58WG2bExISSn1tivPixQvh7u4u6tatK/Lz8zWe/13GU4EkqcePHwMo/uhM27ZtYWNjo3xER0er9Slu/JCRkZHy37m5ucjKysKHH34IIQSSk5MBAHfu3EFKSgrCw8Mhl8uV/T/++GO4u7uXWrMQAps2bUJISAiEEMjKylI+goKCkJ2djaSkJJV5IiIioK+vr3xedLTl6tWrpa6rIope0ydPngD4+0ieXC7Hxx9/rFKzt7c3TE1NkZCQAADKIxzbt29Hfn5+scvetGkTZDIZJk6cqDat6MjA5s2boVAo0K1bN5X12dnZwdXVVbm+l+t9eSyRvr4+fHx8KvU1KiwsxO7duxEWFoZ69eop22vVqoWePXvi0KFDyr9RCwsLnD17FqmpqcUuy8jICPr6+ti3b5/aqdWKmDhxImxsbGBnZ4e2bdviypUrmDlzJrp06YKTJ0/i7t27+Oqrr1TG3HTs2BGNGjXC77//XuJy5XI5OnXqhPXr1ytPFxcWFiI2NhZhYWFq4xW7d++O9957T/n81b/hovdU3759YWlpqezXpEkTfPzxx/jjjz/UahgwYIDy37q6umjevDmEEOjfv7+y3cLCAg0bNnzt38HL7/vnz58jKysLH3zwAQCovR+l8Or7KyUlBampqejZsyfu37+v/HvPzc1FQEAADhw4AIVCgcLCQuzZswdhYWGwt7dXLq9+/foIDg4udl1t2rRR+VzS5DNo48aNcHNzQ6NGjVT6FZ1Sfvl9OGnSJAghynW0aujQoTh37hwWLVpU6T/0qW74apGkzMzMAAA5OTlq05YuXYonT54gMzNTbYA28PfYq+JO2924cQMTJkzAr7/+qvYFl52dDQC4fv06AMDV1VVt/oYNG5b6QXzv3j08evQIy5Ytw7Jly4rtc/fuXZXnjo6OKs+LvqCk/AJ+VdFrWvQap6amIjs7GzVr1iy2f1HNbdq0QdeuXTF58mTMmzcPbdu2RVhYGHr27AkDAwMAf5+Osre3V/kCfVVqaiqEEMW+xgDUftFVu3ZttdM17733XqVeauPevXt4+vQpGjZsqDbNzc0NCoUCN2/ehIeHB7799lt06tQJDRo0QOPGjdGhQwf07t0bTZo0AQAYGBhg5syZGDlyJGxtbfHBBx/g008/RZ8+fWBnZ1fuGgcNGoTPPvtMeUmNonE2wP/9HRdXf6NGjXDo0KFSl92nTx/Exsbi4MGDaN26Nfbs2YPMzEz07t1bre/r/oZLq8XNzQ27du1S+4HJq8uUy+UwNDRUO5Ukl8tfO1bqwYMHmDx5Mn7++We191/R+15Kxb2/ACA8PLzEebKzs/H8+XM8e/as2MHdJQ34rlu3rspzTT6DUlNTcf78+RJPM776WpXH7NmzsXz5cvz3v//FJ598UuHlvWsYrEhScrkctWrVwpkzZ9SmFY1tKWngr4GBgdovBQsLC/Hxxx/jwYMHGDNmDBo1agQTExOkp6ejb9++UCgUFa65aBlffPFFiR+iRV+2RXR1dYvtJ14aWC61ote06MNaoVCgZs2aWLt2bbH9iz54ZTIZfvnlFxw7dgy//fab8uflc+bMwbFjx8o89kuhUEAmk2HHjh3Fbv+ry6mK10gTrVu3xpUrV7Bt2zbs3r0bK1aswLx587BkyRLlkZfhw4cjJCQEW7duxa5duzB+/HhMnz4de/fuRbNmzcq1XldXVwQGBkq5KUpBQUGwtbXFTz/9hNatW+Onn36CnZ1dseurjP1T3DLLu55u3brhyJEj+Oc//4mmTZvC1NQUCoUCHTp0kOR9/6ri3l/A3yGjpMtgmJqa4vnz5xqv6+WjcS+vqyyfQQqFAp6eniVeeqWil4uIiYnBmDFj8OWXX6qNy6OyYbAiyXXs2BErVqzAiRMnlIM7y+v06dO4dOkSVq9ejT59+ijbX/3lTtH1sIo7rXPx4sVS12FjYwMzMzMUFhZK+oUn5eDanJwcbNmyBXXq1IGbmxsAwMXFBXv27EHLli3VPqiL88EHH+CDDz7A1KlTsW7dOvTq1Qs///wzBgwYABcXF+zatQsPHjwo8aiVi4sLhBCoW7cuGjRoIMl2ST0A2cbGBsbGxsXu8wsXLkBHR0fli8fS0hIRERGIiIhATk4OWrdujUmTJqmc0nJxccHIkSMxcuRIpKamomnTppgzZw5++uknSWsH/u/v+OLFi8pTO0UuXrz42uu+6erqomfPnoiJicHMmTOxdetWDBw4sMRwU9ZaXnXhwgVYW1tX2uVQHj58iPj4eEyePBkTJkxQtpd02lYKa9asAQDlryddXFwAAObm5qV+LtSsWROGhoYqvwouUlxbcTT5DHJxccGff/6JgIAAyd8/27Ztw4ABA9ClS5dih2pQ2XCMFUlu9OjRMDY2Rr9+/ZCZmak2XZP/ERd9Ibw8jxACCxYsUOlXq1YtNG3aFKtXr1Y5TRAXF4dz5869dh1du3bFpk2bij3Sdu/evTLX+zITExNJTlk8e/YMvXv3xoMHD/Dvf/9b+WHarVs3FBYW4r///a/aPAUFBXj06BGAv7+kXn3Ni/4HXvTz7K5du0IIgcmTJ6stq2jeLl26QFdXF5MnT1ZbnhCiXD+DL/piLqq1onR1ddG+fXts27ZN5choZmYm1q1bh48++gjm5uYAoFavqakp6tevr3xNnj59qnY0wsXFBWZmZpV2le7mzZujZs2aWLJkico6duzYgfPnz6Njx46vXUbv3r3x8OFDDB48GDk5OcWedi+Ll99TL++fM2fOYPfu3ZV6iqi49z0Aje5koIl169ZhxYoV8PPzQ0BAAADA29sbLi4u+N///lfs0IaizwVdXV0EBgZi69atuH37tnL65cuXsWPHjjKtX5PPoG7duiE9PR3Lly9X6/fs2TPk5uYqn5f1cgsAcODAAXz++edo3bo11q5d+9rrD
FLJeMSKJOfq6op169ahR48eaNiwofLK60IIpKWlYd26ddDR0SnTZRAaNWoEFxcXjBo1Cunp6TA3N8emTZuKHcs0ffp0dOzYER999BH69euHBw8eYOHChfDw8Cj2g/FlM2bMQEJCAnx9fTFw4EC4u7vjwYMHSEpKwp49e/DgwQONXwdvb2/ExsYiKioKLVq0gKmpKUJCQkqdJz09XXkkJCcnB+fOncPGjRuRkZGBkSNHYvDgwcq+bdq0weDBgzF9+nSkpKSgffv2qFGjBlJTU7Fx40YsWLAA//jHP7B69Wp8//336Ny5M1xcXPDkyRMsX74c5ubmyi/Hdu3aoXfv3vjuu++QmpqqPN1y8OBBtGvXDkOHDoWLiwumTJmCcePG4dq1awgLC4OZmRnS0tKwZcsWDBo0CKNGjdLoNXJxcYGFhQWWLFkCMzMzmJiYwNfXV20MyqtWrlypvPbPy7755htMmTIFcXFx+Oijj/DVV19BT08PS5cuxYsXL1Suo+Xu7o62bdvC29sblpaWOHnyJH755RcMHToUAHDp0iUEBASgW7ducHd3h56eHrZs2YLMzEx8/vnnyuUUXR5h1apV6Nu3r0bb/6oaNWpg5syZiIiIQJs2bdCjRw/l5RacnZ0xYsSI1y6jWbNmaNy4sXKQ8/vvv1/uembPno3g4GD4+fmhf//+ysstyOXySr3Xprm5OVq3bo1Zs2YhPz8fDg4O2L17t8q1vsrrl19+gampKfLy8pRXXj98+DC8vLxULt+io6ODFStWIDg4GB4eHoiIiICDgwPS09ORkJAAc3Nz/PbbbwD+HiS+e/dutGzZEkOGDEFhYSEWLVqExo0bl/l2RWX9DOrduzc2bNiAL7/8EgkJCWjZsiUKCwtx4cIFbNiwAbt27VJeHqWsl1u4fv06QkNDIZPJ8I9//EPldQD+Pg356nAIKsWb+wEivWsuX74shgwZIurXry8MDQ2FkZGRaNSokfjyyy9FSkqKSt/w8HBhYmJS7HLOnTsnAgMDhampqbC2thYDBw4Uf/75Z7E/09+0aZNwc3MTBgYGwt3dXWzevFmEh4e/9nILQgiRmZkpIiMjRZ06dUSNGjWEnZ2dCAgIEMuWLVP2KbrcwsaNG1XmLe7n4jk5OaJnz57CwsKi2Es+vMrJyUkAEACETCYT5ubmwsPDQwwcOFAcP368xPmWLVsmvL29hZGRkTAzMxOenp5i9OjR4vbt20IIIZKSkkSPHj2Eo6OjMDAwEDVr1hSffvqpOHnypMpyCgoKxOzZs0WjRo2Evr6+sLGxEcHBweLUqVNqr/FHH30kTExMhImJiWjUqJGIjIwUFy9eVPZp06aN8PDwUKu1uH2xbds24e7urvxpemmXXij6aX1Jj5s3byq3OSgoSJiamgpjY2PRrl07ceTIEZVlTZkyRfj4+AgLCwvl3+bUqVNFXl6eEEKIrKwsERkZKRo1aiRMTEyEXC4Xvr6+YsOGDSrLWbhwoQAgdu7cWWLdQvzf30hZLt8QGxsrmjVrJgwMDISlpaXo1auXuHXrlkqfVy+38LJZs2YJAGLatGka1VHc+2LPnj2iZcuWwsjISJibm4uQkBBx7ty5Ymt59VIGJb2vX/37KO79c+vWLdG5c2dhYWEh5HK5+Oyzz8Tt27fVatT0cgtFD0NDQ1G7dm3x6aefipUrV4rnz58XO19ycrLo0qWLsLKyEgYGBsLJyUl069ZNxMfHq/SLj48XzZo1E/r6+sLFxUWsWLFCjBw5UhgaGqr0AyAiIyOLXVdZPoOEECIvL0/MnDlTeHh4CAMDA/Hee+8Jb29vMXnyZJGdna22za+73ELR51pJj1f/Jqh0MiHekpGkRERaqFu3brh27RpOnDhR1aUoLViwACNGjMC1a9fUfqlHb05YWFipl/Sg6oknUYmIykkIgX379mHKlClVXYqSEAI//PAD2rRpw1D1Br16i63U1FT88ccf5bqGFGk3jrF6jc6dO2Pfvn0ICAjAL7/8UtXlENFbRCaTSXLdICnk5ubi119/RUJCAk6fPo1t27ZVdUnvlHr16invbXj9+nUsXrwY+vr6GD16dFWXRm8YTwW+xr59+/DkyROsXr2awYqI3lrXrl1D3bp1YWFhga+++gpTp06t6pLeKREREUhISEBGRgYMDAzg5+eHadOmVejHA6SdGKzKYN++fVi0aBGDFREREZWqWo+xOnDgAEJCQmBvbw+ZTKZy1+8i0dHRcHZ2hqGhIXx9fd+qAahERESkXap1sMrNzYWXl1eJV5AtusbQxIkTkZSUBC8vLwQFBb01YyaIiIhIu1TrwevBwcEl3l0cAObOnYuBAwciIiICALBkyRL8/vvvWLlyJcaOHavx+l68eKFytWSFQoEHDx7AyspK8lsPEBERUeUQQuDJkyewt7fX+Cr01TpYlSYvLw+nTp3CuHHjlG06OjoIDAzE0aNHy7XM6dOnF3tLECIiItI+N2/eLNNdQl72zgarrKwsFBYWwtbWVqXd1tYWFy5cUD4PDAzEn3/+idzcXNSuXRsbN26En59fscscN24coqKilM+zs7Ph6OiImzdvKu9PRkRERG+3x48fo06dOjAzM9N43nc2WJXVnj17ytzXwMAABgYGau3m5uYMVkRERFqmPMN4qvXg9dJYW1tDV1cXmZmZKu2ZmZmws7OroqqIiIhIm72zwUpfXx/e3t6Ij49XtikUCsTHx5d4qo+IiIioNNX6VGBOTg4uX76sfJ6WloaUlBRYWlrC0dERUVFRCA8PR/PmzeHj44P58+cjNzdX+StBIiIiIk1U62B18uRJtGvXTvm8aGB5eHg4YmJi0L17d9y7dw8TJkxARkYGmjZtip07d6oNaCciIiIqC97SphI9fvwYcrkc2dnZHLxORESkJSry/f3OjrEiIiIikhqDFREREZFEGKyIiIiIJMJgRURERCQRBisiIiIiiTBYEREREUmEwYqIiIhIIgxWRERERBJhsCIiIiKSCINVJYiOjoa7uztatGhR1aUQERHRG8Rb2lQi3tKGiIhI+/CWNkRERERvAQYrIiIiIokwWBERERFJhMGKiIiISCIMVkREREQSYbAiIiIikgiDFREREZFEGKyIiIiIJMJgRURERCQRBisiIiIiiTBYEREREUmEwYqIiIhIIgxWRERERBJhsCIiIiKSCIMVERERkUQYrIiIiIgkwmBFREREJBEGq0oQHR0Nd3d3tGjRoqpLISIiojdIJoQQVV1EdfX48WPI5XJkZ2fD3Ny8qsshIiKiMqjI9zePWBERERFJhMGKiIiISCIMVkREREQSYbAiIiIikgiDFREREZFEGKyIiIiIJMJgRURERCQRBisiIiIiiTBYEREREUmEwYqIiIhIIgxWRERERBJhsCIiIiKSCIMVERERkUQYrIiIiIgkwmBFREREJBEGKyIiIiKJMFgRERERSYTBioiIiEgiDFZEREREEmGwqgTR0dFwd3dHixYtqroUIiIieoNkQghR1UVUV48fP4ZcLkd2djbMzc2ruhwiIiIqg4p8f/OIFREREZFEGKyI
iIiIJMJgRURERCQRBisiIiIiiTBYEREREUmEwYqIiIhIIgxWRERERBJhsCIiIiKSCIMVERERkUQYrIiIiIgkwmBFREREJBEGKyIiIiKJMFgRERERSYTBioiIiEgiDFZEREREEmGwIiIiIpIIgxURERGRRBisNLB9+3Y0bNgQrq6uWLFiRVWXQ0RERG8ZvaouQFsUFBQgKioKCQkJkMvl8Pb2RufOnWFlZVXVpREREdFbgkesyujEiRPw8PCAg4MDTE1NERwcjN27d1d1WURERPQWeSuCVXp6Or744gtYWVnByMgInp6eOHnypGTLP3DgAEJCQmBvbw+ZTIatW7cW2y86OhrOzs4wNDSEr68vTpw4oZx2+/ZtODg4KJ87ODggPT1dshqJiIhI+1V5sHr48CFatmyJGjVqYMeOHTh37hzmzJmD9957r9j+hw8fRn5+vlr7uXPnkJmZWew8ubm58PLyQnR0dIl1xMbGIioqChMnTkRSUhK8vLwQFBSEu3fvlm/DiIiI6J1T5cFq5syZqFOnDlatWgUfHx/UrVsX7du3h4uLi1pfhUKByMhI9OzZE4WFhcr2ixcvwt/fH6tXry52HcHBwZgyZQo6d+5cYh1z587FwIEDERERAXd3dyxZsgTGxsZYuXIlAMDe3l7lCFV6ejrs7e3Lu9lERERUDVV5sPr111/RvHlzfPbZZ6hZsyaaNWuG5cuXF9tXR0cHf/zxB5KTk9GnTx8oFApcuXIF/v7+CAsLw+jRo8tVQ15eHk6dOoXAwECVdQUGBuLo0aMAAB8fH5w5cwbp6enIycnBjh07EBQUVOzyoqOj4e7ujhYtWpSrHiIiItJOVR6srl69isWLF8PV1RW7du3CkCFDMGzYsBKPPtnb22Pv3r04dOgQevbsCX9/fwQGBmLx4sXlriErKwuFhYWwtbVVabe1tUVGRgYAQE9PD3PmzEG7du3QtGlTjBw5ssRfBEZGRuLcuXNITEwsd01ERESkfar8cgsKhQLNmzfHtGnTAADNmjXDmTNnsGTJEoSHhxc7j6OjI9asWYM2bdqgXr16+OGHHyCTySq91tDQUISGhlb6eoiIiEg7VfkRq1q1asHd3V2lzc3NDTdu3ChxnszMTAwaNAghISF4+vQpRowYUaEarK2toaurqzb4PTMzE3Z2dhVaNhEREb07qjxYtWzZEhcvXlRpu3TpEpycnIrtn5WVhYCAALi5uWHz5s2Ij49HbGwsRo0aVe4a9PX14e3tjfj4eGWbQqFAfHw8/Pz8yr1cIiIierdU+anAESNG4MMPP8S0adPQrVs3nDhxAsuWLcOyZcvU+ioUCgQHB8PJyQmxsbHQ09ODu7s74uLi4O/vDwcHh2KPXuXk5ODy5cvK52lpaUhJSYGlpSUcHR0BAFFRUQgPD0fz5s3h4+OD+fPnIzc3FxEREZW38URERFStyIQQoqqL2L59O8aNG4fU1FTUrVsXUVFRGDhwYLF94+Li0KpVKxgaGqq0Jycnw8bGBrVr11abZ9++fWjXrp1ae3h4OGJiYpTPFy1ahNmzZyMjIwNNmzbFd999B19f33Jv1+PHjyGXy5GdnQ1zc/NyL4eIiIjenIp8f78Vwaq6YrAiIiLSPhX5/q7yMVZERERE1QWDFREREZFEGKyIiIiIJMJgRURERCQRBisiIiIiiTBYEREREUmEwYqIiIhIIgxWRERERBJhsCIiIiKSCIMVERERkUQYrIiIiIgkwmBFREREJBEGKyIiIiKJMFgRERERSYTBioiIiEgiDFZEREREEmGwIiIiIpIIgxURERGRRBisiIiIiCTCYEVEREQkEQYrIiIiIokwWFWC6OhouLu7o0WLFlVdChEREb1BMiGEqOoiqqvHjx9DLpcjOzsb5ubmVV0OERERlUFFvr95xIqIiIhIIgxWRERERBJhsCIiIiKSiEbBqqCgAN9++y1u3bpVWfUQERERaS2NgpWenh5mz56NgoKCyqqHiIiISGtpfCrQ398f+/fvr4xaiIiIiLSanqYzBAcHY+zYsTh9+jS8vb1hYmKiMj00NFSy4oiIiIi0icbXsdLRKfkgl0wmQ2FhYYWLqi54HSsiIiLtU5Hvb42PWCkUCk1nISIiInon8HILRERERBIpV7Dav38/QkJCUL9+fdSvXx+hoaE4ePCg1LURERERaRWNg9VPP/2EwMBAGBsbY9iwYRg2bBiMjIwQEBCAdevWVUaNRERERFpB48Hrbm5uGDRoEEaMGKHSPnfuXCxfvhznz5+XtEBtxsHrRERE2ueN3oT56tWrCAkJUWsPDQ1FWlqaposjIiIiqjY0DlZ16tRBfHy8WvuePXtQp04dSYoiIiIi0kYaX25h5MiRGDZsGFJSUvDhhx8CAA4fPoyYmBgsWLBA8gKJiIiItIXGwWrIkCGws7PDnDlzsGHDBgB/j7uKjY1Fp06dJC+QiIiISFtoFKwKCgowbdo09OvXD4cOHaqsmoiIiIi0kkZjrPT09DBr1iwUFBRUVj1EREREWkvjwesBAQHYv39/ZdRCREREpNU0HmMVHByMsWPH4vTp0/D29oaJiYnK9NDQUMmKIyIiItImGl8gVEen5INcMpkMhYWFFS6quuAFQomIiLRPRb6/NT5ipVAoNJ2FiIiI6J2g0Rir/Px86Onp4cyZM5VVDxEREZHW0ihY1ahRA46OjjzdR0RERFQMjX8V+O9//xv/+te/8ODBg8qoh4iIiEhraTzGatGiRbh8+TLs7e3h5OSk9qvApKQkyYojIiIi0iYaB6uwsLBKKIOIiIhI+2l8uYV32fbt2zFy5EgoFAqMGTMGAwYMKLU/L7dARESkfSry/V3mMVYnTpwoddD6ixcvlDdlro4KCgoQFRWFvXv3Ijk5GbNnz8b9+/eruiwiIiJ6i5Q5WPn5+akECXNzc1y9elX5/NGjR+jRo4e01b1FTpw4AQ8PDzg4OMDU1BTBwcHYvXt3VZdFREREb5EyB6tXzxgWdwaxomcVZ8yYAZlMhuHDh1doOa86cOAAQkJCYG9vD5lMhq1btxbbLzo6Gs7OzjA0NISvry9OnDihnHb79m04ODgonzs4OCA9PV3SOomIiEi7aXy5hdLIZLJyz5uYmIilS5eiSZMmpfY7fPgw8vPz1drPnTuHzMzMYufJzc2Fl5cXoqOjS1xubGwsoqKiMHHiRCQlJcHLywtBQUG4e/euZhtCRERE7yxJg1V55eTkoFevXli+fDnee++9EvspFApERkaiZ8+eKuO9Ll68CH9/f6xevbrY+YKDgzFlyhR07ty5xGXPnTsXAwcOREREBNzd3bFkyRIYGxtj5cqVAAB7e3uVI1Tp6emwt7fXdFOJiIioGtMoWJ07dw5//fUX/vrrLwghcOHCBeXzs2fPlruIyMhIdOzYEYGBgaUXq6ODP/74A8nJyejTpw8UCgWuXLkCf39/hIWFYfTo0eVaf15eHk6dOqWyfh0dHQQGBuLo0aMAAB8fH5w5cwbp6enIycnBjh07EBQUVOzyoqOj4e7ujhYtWpSrHiIiItJOGl3HKiAgQGU
c1aeffgrg71OAQohynQr8+eefkZSUhMTExDL1t7e3x969e9GqVSv07NkTR48eRWBgIBYvXqzxuotkZWWhsLAQtra2Ku22tra4cOECAEBPTw9z5sxBu3btoFAoMHr0aFhZWRW7vMjISERGRip/rklERETvhjIHq7S0NMlXfvPmTXzzzTeIi4uDoaFhmedzdHTEmjVr0KZNG9SrVw8//PBDhcZ3lVVoaChCQ0MrfT1ERESkncocrJycnCRf+alTp3D37l28//77yrbCwkIcOHAAixYtwosXL6Crq6s2X2ZmJgYNGoSQkBAkJiZixIgRWLhwYbnrsLa2hq6urtrg98zMTNjZ2ZV7uURERPRuqdLB6wEBATh9+jRSUlKUj+bNm6NXr15ISUkpNlRlZWUhICAAbm5u2Lx5M+Lj4xEbG4tRo0aVuw59fX14e3sjPj5e2aZQKBAfHw8/P79yL5eIiIjeLRrfK1BKZmZmaNy4sUqbiYkJrKys1NqBv8NOcHAwnJycEBsbCz09Pbi7uyMuLg7+/v5wcHDAiBEj1ObLycnB5cuXlc/T0tKQkpICS0tLODo6AgCioqIQHh6O5s2bw8fHB/Pnz0dubi4iIiIk3moiIiKqrqo0WGlKR0cH06ZNQ6tWraCvr69s9/Lywp49e2BjY1PsfCdPnkS7du2Uz6OiogAA4eHhiImJAQB0794d9+7dw4QJE5CRkYGmTZti586dagPaiYiIiErCmzBXIt6EmYiISPu8kZswExEREVHpynQqsFmzZmW+nEFSUlKFCiIiIiLSVmUKVmFhYcp/P3/+HN9//z3c3d2Vv5g7duwYzp49i6+++qpSiiQiIiLSBmUKVhMnTlT+e8CAARg2bBj++9//qvW5efOmtNURERERaRGNB6/L5XKcPHkSrq6uKu2pqalo3rw5srOzJS1Qm3HwOhERkfZ5o4PXjYyMcPjwYbX2w4cPa3RbGiIiIqLqRuPrWA0fPhxDhgxBUlISfHx8AADHjx/HypUrMX78eMkLJCIiItIWGgersWPHol69eliwYAF++uknAICbmxtWrVqFbt26SV4gERERkbbgBUIrEcdYERERaZ83foHQR48eYcWKFfjXv/6FBw8eAPj7+lXp6enlWRwRERFRtaDxqcC//voLgYGBkMvluHbtGgYMGABLS0ts3rwZN27cwI8//lgZdRIRERG99TQ+YhUVFYW+ffsiNTVV5VeAn3zyCQ4cOCBpcURERETaRONglZiYiMGDB6u1Ozg4ICMjQ5KiiIiIiLSRxsHKwMAAjx8/Vmu/dOkSbGxsJCmKiIiISBtpHKxCQ0Px7bffIj8/HwAgk8lw48YNjBkzBl27dpW8QCIiIiJtoXGwmjNnDnJyclCzZk08e/YMbdq0Qf369WFmZoapU6dWRo1EREREWkHjXwXK5XLExcXh8OHD+PPPP5GTk4P3338fgYGBlVEfERERkdbQKFjl5+fDyMgIKSkpaNmyJVq2bFlZdRERERFpHY1OBdaoUQOOjo4oLCysrHqIiIiItJbGY6z+/e9/q1xxnYiIiIj+pvEYq0WLFuHy5cuwt7eHk5MTTExMVKYnJSVJVhwRERGRNtE4WIWFhVVCGURERETaTyaEEFVdRHVVkbtjExERUdWoyPe3xmOsiIiIiKh4Gp8KLCwsxLx587BhwwbcuHEDeXl5KtM5qJ2IiIjeVRofsZo8eTLmzp2L7t27Izs7G1FRUejSpQt0dHQwadKkSiiRiIiISDtoHKzWrl2L5cuXY+TIkdDT00OPHj2wYsUKTJgwAceOHauMGomIiIi0gsbBKiMjA56engAAU1NTZGdnAwA+/fRT/P7779JWR0RERKRFNA5WtWvXxp07dwAALi4u2L17NwAgMTERBgYG0lZHREREpEU0DladO3dGfHw8AODrr7/G+PHj4erqij59+qBfv36SF0hERESkLSp8HaujR4/i6NGjcHV1RUhIiFR1VQu8jhUREZH2qcj3t8aXW3iVn58f/Pz8KroYIiIiIq2ncbD68ccfS53ep0+fchfzttu+fTtGjhwJhUKBMWPGYMCAAVVdEhEREb1FND4V+N5776k8z8/Px9OnT6Gvrw9jY+Nqe4HQgoICuLu7IyEhAXK5HN7e3jhy5AisrKxKnIenAomIiLTPG72lzcOHD1UeOTk5uHjxIj766COsX79e08VpjRMnTsDDwwMODg4wNTVFcHCw8heRRERERIBE9wp0dXXFjBkz8M0332g87+LFi9GkSROYm5vD3Nwcfn5+2LFjhxRlKR04cAAhISGwt7eHTCbD1q1bi+0XHR0NZ2dnGBoawtfXFydOnFBOu337NhwcHJTPHRwckJ6eLmmdREREpN0kuwmznp4ebt++rfF8tWvXxowZM3Dq1CmcPHkS/v7+6NSpE86ePVts/8OHDyM/P1+t/dy5c8jMzCx2ntzcXHh5eSE6OrrEOmJjYxEVFYWJEyciKSkJXl5eCAoKwt27dzXeJiIiIno3aTx4/ddff1V5LoTAnTt3sGjRIrRs2VLjAl69RMPUqVOxePFiHDt2DB4eHirTFAoFIiMj4erqip9//hm6uroAgIsXL8Lf3x9RUVEYPXq02jqCg4MRHBxcah1z587FwIEDERERAQBYsmQJfv/9d6xcuRJjx46Fvb29yhGq9PR0+Pj4aLy9REREVH1pHKzCwsJUnstkMtjY2MDf3x9z5sypUDGFhYXYuHEjcnNzi72Eg46ODv744w+0bt0affr0wZo1a5CWlgZ/f3+EhYUVG6rKIi8vD6dOncK4ceNU1hUYGIijR48CAHx8fHDmzBmkp6dDLpdjx44dGD9+fLHLi46ORnR0NAoLC8tVDxEREWknjYOVQqGQvIjTp0/Dz88Pz58/h6mpKbZs2QJ3d/di+9rb22Pv3r1o1aoVevbsiaNHjyIwMBCLFy8u9/qzsrJQWFgIW1tblXZbW1tcuHABwN+nOufMmYN27dpBoVBg9OjRJf4iMDIyEpGRkcpfFRAREdG7ocIXCJVCw4YNkZKSguzsbPzyyy8IDw/H/v37SwxXjo6OWLNmDdq0aYN69erhhx9+gEwmq/Q6Q0NDERoaWunrISIiIu2kcbCKiooqc9+5c+eWqZ++vj7q168PAPD29kZiYiIWLFiApUuXFts/MzMTgwYNQkhICBITEzFixAgsXLiwzHW9ytraGrq6umqD3zMzM2FnZ1fu5RIREdG7ReNglZycjOTkZOTn56Nhw4YAgEuXLkFXVxfvv/++sl9FjiApFAq8ePGi2GlZWVkICAiAm5sbNm7ciEuXLqFt27YwMDDA//73v3KtT19fH97e3oiPj1eOIVMoFIiPj8fQoUPLuxlERET0jtE4WIWEhMDMzAyrV69WXoX94cOHiIiIQKtWrTBy5EiNljdu3DgEBwfD0dERT548wbp167Bv3z7s2rVLra9CoUBwcDCcnJwQGxsLPT09uLu7Iy4uDv7+/nBwcMCIESPU5svJycHly5eVz9PS0pCSkgJLS0s4OjoC+PtIXHh4OJo3bw4fHx/Mnz8fubm5yl
8JEhEREb2W0JC9vb04c+aMWvvp06dFrVq1NF2c6Nevn3BychL6+vrCxsZGBAQEiN27d5fYf/fu3eLZs2dq7UlJSeLmzZvFzpOQkCAAqD3Cw8NV+i1cuFA4OjoKfX194ePjI44dO6bx9rwsOztbABDZ2dkVWg4RERG9ORX5/tb4XoFmZmb47bff0LZtW5X2hIQEhIaG4smTJ5IEvuqA9wokIiLSPm/0XoGdO3dGREQENm/ejFu3buHWrVvYtGkT+vfvjy5dumi6OCIiIqJqQ+MxVkuWLMGoUaPQs2dP5a1l9PT00L9/f8yePVvyAomIiIi0hcanAovk5ubiypUrAAAXFxeYmJhIWlh1wFOBRERE2ueNngosYmJigiZNmkAul+P69euVckV2IiIiIm1S5mC1cuVKtQt+Dho0CPXq1YOnpycaN26MmzdvSl4gERERkbYoc7BatmyZ8rpVALBz506sWrUKP/74IxITE2FhYYHJkydXSpFERERE2qDMg9dTU1PRvHlz5fNt27ahU6dO6NWrFwBg2rRpvJgmERERvdPKfMTq2bNnKgO4jhw5gtatWyuf16tXDxkZGdJWR0RERKRFyhysnJyccOrUKQB/36/v7NmzaNmypXJ6RkYG5HK59BUSERERaYkynwoMDw9HZGQkzp49i71796JRo0bw9vZWTj9y5AgaN25cKUUSERERaYMyB6vRo0fj6dOn2Lx5M+zs7LBx40aV6YcPH0aPHj0kL5CIiIhIW5T7AqH0erxAKBERkfapkguEEhEREZEqBisiIiIiiTBYEREREUmEwYqIiIhIIgxWRERERBIp8+UWihQWFiImJgbx8fG4e/cuFAqFyvS9e/dKVhwRERGRNtE4WH3zzTeIiYlBx44d0bhxY8hkssqoi4iIiEjraBysfv75Z2zYsAGffPJJZdRDREREpLU0HmOlr6+P+vXrV0YtRERERFpN42A1cuRILFiwALxgOxEREZEqjU8FHjp0CAkJCdixYwc8PDxQo0YNlembN2+WrDgiIiIibaJxsLKwsEDnzp0roxYiIiIiraZxsFq1alVl1EFERESk9XiBUCIiIiKJaHzECgB++eUXbNiwATdu3EBeXp7KtKSkJEkKIyIiItI2Gh+x+u677xAREQFbW1skJyfDx8cHVlZWuHr1KoKDgyujRiIiIiKtoHGw+v7777Fs2TIsXLgQ+vr6GD16NOLi4jBs2DBkZ2dXRo1EREREWkHjYHXjxg18+OGHAAAjIyM8efIEANC7d2+sX79e2uqIiIiItIjGwcrOzg4PHjwAADg6OuLYsWMAgLS0NF40lIiIiN5pGgcrf39//PrrrwCAiIgIjBgxAh9//DG6d+/O61sRERHRO00mNDzMpFAooFAooKf39w8Kf/75Zxw5cgSurq4YPHgw9PX1K6VQbfT48WPI5XJkZ2fD3Ny8qsshIiKiMqjI97fGwYrKjsGKiIhI+1Tk+7tcFwg9ePAgvvjiC/j5+SE9PR0AsGbNGhw6dKg8iyMiIiKqFjQOVps2bUJQUBCMjIyQnJyMFy9eAACys7Mxbdo0yQskIiIi0hYaB6spU6ZgyZIlWL58OWrUqKFsb9myJa+6TkRERO80jYPVxYsX0bp1a7V2uVyOR48eSVETERERkVYq13WsLl++rNZ+6NAh1KtXT5KiiIiIiLSRxsFq4MCB+Oabb3D8+HHIZDLcvn0ba9euxahRozBkyJDKqJGIiIhIK+hpOsPYsWOhUCgQEBCAp0+fonXr1jAwMMCoUaPw9ddfV0aNRERERFqh3NexysvLw+XLl5GTkwN3d3eYmppKXZvW43WsiIiItE9Fvr81PmJVRF9fH+7u7uWdnYiIiKjaKXOw6tevX5n6rVy5stzFEBEREWmzMgermJgYODk5oVmzZuBdcIiIiIjUlTlYDRkyBOvXr0daWhoiIiLwxRdfwNLSsjJrIyIiItIqZb7cQnR0NO7cuYPRo0fjt99+Q506ddCtWzfs2rWLR7CIiIiIUIFfBV6/fh0xMTH48ccfUVBQgLNnz/KXga/grwKJiIi0T0W+vzW+QKhyRh0dyGQyCCFQWFhY3sUQERERVRsaBasXL15g/fr1+Pjjj9GgQQOcPn0aixYtwo0bN3i0ioiIiN55ZR68/tVXX+Hnn39GnTp10K9fP6xfvx7W1taVWRsRERGRVinzGCsdHR04OjqiWbNmkMlkJfbbvHmzZMW9bbZv346RI0dCoVBgzJgxGDBgQKn9OcaKiIhI+7yRK6/36dOn1EBV3RUUFCAqKgoJCQmQy+Xw9vZG586dYWVlVdWlERER0VtCowuEvstOnDgBDw8PODg4AACCg4Oxe/du9OjRo4orIyIiordFuX8VKJXp06ejRYsWMDMzQ82aNREWFoaLFy9Kuo4DBw4gJCQE9vb2kMlk2Lp1a7H9oqOj4ezsDENDQ/j6+uLEiRPKabdv31aGKgBwcHBAenq6pHUSERGRdqvyYLV//35ERkbi2LFjiIuLQ35+Ptq3b4/c3Nxi+x8+fBj5+flq7efOnUNmZmax8+Tm5sLLywvR0dEl1hEbG4uoqChMnDgRSUlJ8PLyQlBQEO7evVu+DSMiIqJ3TpUHq507d6Jv377w8PCAl5cXYmJicOPGDZw6dUqtr0KhQGRkJHr27Kly7ayLFy/C398fq1evLnYdwcHBmDJlCjp37lxiHXPnzsXAgQMREREBd3d3LFmyBMbGxsqbStvb26scoUpPT4e9vX15N5uIiIiqoSoPVq/Kzs4GgGLvQ6ijo4M//vgDycnJ6NOnDxQKBa5cuQJ/f3+EhYVh9OjR5VpnXl4eTp06hcDAQJV1BQYG4ujRowAAHx8fnDlzBunp6cjJycGOHTsQFBRU7PKio6Ph7u6OFi1alKseIiIi0k5vVbBSKBQYPnw4WrZsicaNGxfbx97eHnv37sWhQ4fQs2dP+Pv7IzAwEIsXLy73erOyslBYWAhbW1uVdltbW2RkZAAA9PT0MGfOHLRr1w5NmzbFyJEjS/xFYGRkJM6dO4fExMRy10RERETap8y/CnwTIiMjcebMGRw6dKjUfo6OjlizZg3atGmDevXq4Ycffngjl4IIDQ1FaGhopa+HiIiItNNbc8Rq6NCh2L59OxISElC7du1S+2ZmZmLQoEEICQnB06dPMWLEiAqt29raGrq6umqD3zMzM2FnZ1ehZRMREdG7o8qDlRACQ4cOxZYtW7B3717UrVu31P5ZWVkICAiAm5sbNm/ejPj4eMTGxmLUqFHlrkFfXx/e3t6Ij49XtikUCsTHx8PPz6/cyyUiIqJ3S5WfCoyMjMS6deuwbds2mJmZKcc0yeVyGBkZqfRVKBQIDg6Gk5MTYmNjoaenB3d3d8TFxcHf3x8ODg7FHr3KycnB5cuXlc/T0tKQkpICS0tLODo6AgCioqIQHh6O5s2bw8fHB/Pnz0dubi4iIiIqceuJiIioOinzvQIrrYASxkatWrUKffv2VWuPi4tDq1atYGhoqNKenJwMGxubYk8j7tu3D+3atVNrDw8PV7mi/KJFizB79
mxkZGSgadOm+O677+Dr66vZBr2E9wokIiLSPhX5/q7yYFWdMVgRERFpn4p8f1f5GCsiIiKi6oLBioiIiEgiDFZEREREEmGwIiIiIpIIgxURERGRRBisiIiIiCTCYEVEREQkEQYrIiIiIokwWBERERFJhMGKiIiISCIMVkREREQSYbAiIiIikgiDFREREZFEGKyIiIiIJMJgRURERCQRBisiIiIiiTBYEREREUmEwYqIiIhIIgxWRERERBJhsCIiIiKSCIMVERERkUQYrIiIiIgkwmBFREREJBEGKyIiIiKJMFgRERERSYTBioiIiEgiDFZEREREEmGwIiIiIpIIgxURERGRRBisiIiIiCTCYEVEREQkEQYrIiIiIokwWBERERFJhMGKiIiISCIMVkREREQSYbAiIiIikgiDFREREZFEGKyIiIiIJMJgRURERCQRBisiIiIiiTBYEREREUmEwYqIiIhIIgxWRERERBJhsCIiIiKSCIMVERERkUQYrIiIiIgkwmBFREREJBEGKyIiIiKJMFgRERERSYTBioiIiEgiDFZEREREEmGwIiIiIpIIgxURERGRRBisiIiIiCTCYEVEREQkEQYrIiIiIokwWBERERFJhMGKiIiISCIMVkREREQSYbAiIiIikgiDFREREZFEGKyIiIiIJMJgRURERCQRBisiIiIiiTBYEREREUmEwYqIiIhIIgxWRERERBJhsCIiIiKSCIMVERERkUQYrIiIiIgkwmBFREREJBEGKyIiIiKJMFgRERERSYTBioiIiEgiDFZEREREEmGwIiIiIpIIgxURERGRRBisiIiIiCTCYEVEREQkEQYrIiIiIokwWBERERFJhMGKiIiISCIMVkREREQSYbAiIiIikgiDFREREZFEGKyIiIiIJMJgRURERCQRBisiIiIiiTBYEREREUmEwYqIiIhIIgxWRERERBJhsCIiIiKSCIMVERERkUQYrIiIiIgkwmBFREREJBEGKyIiIiKJMFgRERERSYTBioiIiEgiDFavsX37djRs2BCurq5YsWJFVZdDREREbzG9qi7gbVZQUICoqCgkJCRALpfD29sbnTt3hpWVVVWXRkRERG8hHrEqxYkTJ+Dh4QEHBweYmpoiODgYu3fvruqyiIiI6C1VrYPVgQMHEBISAnt7e8hkMmzdulWtT3R0NJydnWFoaAhfX1+cOHFCOe327dtwcHBQPndwcEB6evqbKJ2IiIi0ULUOVrm5ufDy8kJ0dHSx02NjYxEVFYWJEyciKSkJXl5eCAoKwt27d99wpURERFQdVOsxVsHBwQgODi5x+ty5czFw4EBEREQAAJYsWYLff/8dK1euxNixY2Fvb69yhCo9PR0+Pj4lLu/Fixd48eKF8nl2djYA4PHjxxXdFCIiInpDir63hRCazyzeEQDEli1blM9fvHghdHV1VdqEEKJPnz4iNDRUCCFEfn6+qF+/vrh165Z48uSJaNCggcjKyipxHRMnThQA+OCDDz744IOPavC4efOmxnmjWh+xKk1WVhYKCwtha2ur0m5ra4sLFy4AAPT09DBnzhy0a9cOCoUCo0ePLvUXgePGjUNUVJTyuUKhwIMHD2BlZQWZTFbhmh8/fow6derg5s2bMDc3r/Dy3mbvyra+K9sJvDvbyu2sft6VbX1XthN4/bYKIfDkyRPY29trvOx3NliVVWhoKEJDQ8vU18DAAAYGBiptFhYWktdkbm5e7f/oi7wr2/qubCfw7mwrt7P6eVe29V3ZTqD0bZXL5eVaZrUevF4aa2tr6OrqIjMzU6U9MzMTdnZ2VVQVERERabN3Nljp6+vD29sb8fHxyjaFQoH4+Hj4+flVYWVERESkrar1qcCcnBxcvnxZ+TwtLQ0pKSmwtLSEo6MjoqKiEB4ejubNm8PHxwfz589Hbm6u8leCbxsDAwNMnDhR7XRjdfSubOu7sp3Au7Ot3M7q513Z1ndlO4HK3VbZ///FXLW0b98+tGvXTq09PDwcMTExAIBFixZh9uzZyMjIQNOmTfHdd9/B19f3DVdKRERE1UG1DlZEREREb9I7O8aKiIiISGoMVkREREQSYbAiIiIikgiDlRaJjo6Gs7MzDA0N4evrixMnTlR1SRUyffp0tGjRAmZmZqhZsybCwsJw8eJFlT5t27aFTCZTeXz55ZdVVHH5TZo0SW07GjVqpJz+/PlzREZGwsrKCqampujatavaNda0gbOzs9p2ymQyREZGAtDe/XngwAGEhITA3t4eMpkMW7duVZkuhMCECRNQq1YtGBkZITAwEKmpqSp9Hjx4gF69esHc3BwWFhbo378/cnJy3uBWlE1p25qfn48xY8bA09MTJiYmsLe3R58+fXD79m2VZRT3dzBjxow3vCWle90+7du3r9o2dOjQQaVPddinAIp9z8pkMsyePVvZRxv2aVm+U8ryWXvjxg107NgRxsbGqFmzJv75z3+ioKCgzHUwWGmJ2NhYREVFYeLEiUhKSoKXlxeCgoJw9+7dqi6t3Pbv34/IyEgcO3YMcXFxyM/PR/v27ZGbm6vSb+DAgbhz547yMWvWrCqquGI8PDxUtuPQoUPKaSNGjMBvv/2GjRs3Yv/+/bh9+za6dOlShdWWT2Jioso2xsXFAQA+++wzZR9t3J+5ubnw8vJCdHR0sdNnzZqF7777DkuWLMHx48dhYmKCoKAgPH/+XNmnV69eOHv2LOLi4rB9+3YcOHAAgwYNelObUGalbevTp0+RlJSE8ePHIykpCZs3b8bFixeLvTvFt99+q7Kfv/766zdRfpm9bp8CQIcOHVS2Yf369SrTq8M+BaCyjXfu3MHKlSshk8nQtWtXlX5v+z4ty3fK6z5rCwsL0bFjR+Tl5eHIkSNYvXo1YmJiMGHChLIXovHdBalK+Pj4iMjISOXzwsJCYW9vL6ZPn16FVUnr7t27AoDYv3+/sq1Nmzbim2++qbqiJDJx4kTh5eVV7LRHjx6JGjVqiI0bNyrbzp8/LwCIo0ePvqEKK8c333wjXFxchEKhEEJUj/0JqN7QXaFQCDs7OzF79mxl26NHj4SBgYFYv369EEKIc+fOCQAiMTFR2WfHjh1CJpOJ9PT0N1a7pl7d1uKcOHFCABDXr19Xtjk5OYl58+ZVbnESKm47w8PDRadOnUqcpzrv006dOgl/f3+VNm3bp0Kof6eU5bP2jz/+EDo6OiIjI0PZZ/HixcLc3Fy8ePGiTOvlESstkJeXh1OnTiEwMFDZpqOjg8DAQBw9erQKK5NWdnY2AMDS0lKlfe3atbC2tkbjxo0xbtw4PH36tCrKq7DU1FTY29ujXr166NWrF27cuAEAOHXqFPLz81X2b6NGjeDo6KjV+zcvLw8//fQT+vXrp3IT8uqyP4ukpaUhIyNDZf/J5XL4+voq99/Ro0dhYWGB5s2bK/sEBgZCR0cHx48ff+M1Syk7OxsymUztvqgzZsyAlZUVmjVrhtmzZ2t0KuVtsW/fPtSsWRMNGzbEkCFDcP/+feW06rpPMzMz8fvvv6N///5q07Rtn776nVKWz9qjR4/C
09MTtra2yj5BQUF4/Pgxzp49W6b1Vusrr1cXWVlZKCwsVNnRAGBra4sLFy5UUVXSUigUGD58OFq2bInGjRsr23v27AknJyfY29vjr7/+wpgxY3Dx4kVs3ry5CqvVnK+vL2JiYtCwYUPcuXMHkydPRqtWrXDmzBlkZGRAX19f7YvJ1tYWGRkZVVOwBLZu3YpHjx6hb9++yrbqsj9fVrSPint/Fk3LyMhAzZo1Vabr6enB0tJSq/fx8+fPMWbMGPTo0UPlRrbDhg3D+++/D0tLSxw5cgTjxo3DnTt3MHfu3CqsVjMdOnRAly5dULduXVy5cgX/+te/EBwcjKNHj0JXV7fa7tPVq1fDzMxMbSiCtu3T4r5TyvJZm5GRUex7uWhaWTBY0VshMjISZ86cURl3BEBlvIKnpydq1aqFgIAAXLlyBS4uLm+6zHILDg5W/rtJkybw9fWFk5MTNmzYACMjoyqsrPL88MMPCA4Ohr29vbKtuuxP+nsge7du3SCEwOLFi1WmRUVFKf/dpEkT6OvrY/DgwZg+fbrW3C7l888/V/7b09MTTZo0gYuLC/bt24eAgIAqrKxyrVy5Er169YKhoaFKu7bt05K+U94EngrUAtbW1tDV1VX75UJmZibs7OyqqCrpDB06FNu3b0dCQgJq165dat+i2w29fA9IbWRhYYEGDRrg8uXLsLOzQ15eHh49eqTSR5v37/Xr17Fnzx4MGDCg1H7VYX8W7aPS3p92dnZqPzQpKCjAgwcPtHIfF4Wq69evIy4uTuVoVXF8fX1RUFCAa9euvZkCK0G9evVgbW2t/FutbvsUAA4ePIiLFy++9n0LvN37tKTvlLJ81trZ2RX7Xi6aVhYMVlpAX18f3t7eiI+PV7YpFArEx8fDz8+vCiurGCEEhg4dii1btmDv3r2oW7fua+dJSUkBANSqVauSq6tcOTk5uHLlCmrVqgVvb2/UqFFDZf9evHgRN27c0Nr9u2rVKtSsWRMdO3YstV912J9169aFnZ2dyv57/Pgxjh8/rtx/fn5+ePToEU6dOqXss3fvXigUCq27N2lRqEpNTcWePXtgZWX12nlSUlKgo6OjdupMm9y6dQv3799X/q1Wp31a5IcffoC3tze8vLxe2/dt3Kev+04py2etn58fTp8+rRKai/7z4O7uXuZCSAv8/PPPwsDAQMTExIhz586JQYMGCQsLC5VfLmibIUOGCLlcLvbt2yfu3LmjfDx9+lQIIcTly5fFt99+K06ePCnS0tLEtm3bRL169UTr1q2ruHLNjRw5Uuzbt0+kpaWJw4cPi8DAQGFtbS3u3r0rhBDiyy+/FI6OjmLv3r3i5MmTws/PT/j5+VVx1eVTWFgoHB0dxZgxY1TatXl/PnnyRCQnJ4vk5GQBQMydO1ckJycrfwk3Y8YMYWFhIbZt2yb++usv0alTJ1G3bl3x7Nkz5TI6dOggmjVrJo4fPy4OHTokXF1dRY8ePapqk0pU2rbm5eWJ0NBQUbt2bZGSkqLyvi36xdSRI0fEvHnzREpKirhy5Yr46aefhI2NjejTp08Vb5mq0rbzyZMnYtSoUeLo0aMiLS1N7NmzR7z//vvC1dVVPH/+XLmM6rBPi2RnZwtjY2OxePFitfm1ZZ++7jtFiNd/1hYUFIjGjRuL9u3bi5SUFLFz505hY2Mjxo0bV+Y6GKy0yMKFC4Wjo6PQ19cXPj4+4tixY1VdUoUAKPaxatUqIYQQN27cEK1btxaWlpbCwMBA1K9fX/zzn/8U2dnZVVt4OXTv3l3UqlVL6OvrCwcHB9G9e3dx+fJl5fRnz56Jr776Srz33nvC2NhYdO7cWdy5c6cKKy6/Xbt2CQDi4sWLKu3avD8TEhKK/VsNDw8XQvx9yYXx48cLW1tbYWBgIAICAtS2//79+6JHjx7C1NRUmJubi4iICPHkyZMq2JrSlbataWlpJb5vExIShBBCnDp1Svj6+gq5XC4MDQ2Fm5ubmDZtmkogeRuUtp1Pnz4V7du3FzY2NqJGjRrCyclJDBw4UO0/stVhnxZZunSpMDIyEo8ePVKbX1v26eu+U4Qo22fttWvXRHBwsDAyMhLW1tZi5MiRIj8/v8x1yP5/MURERERUQRxjRURERCQRBisiIiIiiTBYEREREUmEwYqIiIhIIgxWRERERBJhsCIiIiKSCIMVERERkUQYrIiIJObs7Iz58+dXdRlEVAUYrIhIq/Xt2xdhYWEAgLZt22L48OFvbN0xMTGwsLBQa09MTMSgQYPeWB1E9PbQq+oCiIjeNnl5edDX1y/3/DY2NhJWQ0TahEesiKha6Nu3L/bv348FCxZAJpNBJpPh2rVrAIAzZ84gODgYpqamsLW1Re/evZGVlaWct23bthg6dCiGDx8Oa2trBAUFAQDmzp0LT09PmJiYoE6dOvjqq6+Qk5MDANi3bx8iIiKQnZ2tXN+kSZMAqJ8KvHHjBjp16gRTU1OYm5ujW7duyMzMVE6fNGkSmjZtijVr1sDZ2RlyuRyff/45njx5ouzzyy+/wNPTE0ZGRrCyskJgYCByc3Mr6dUkovJisCKiamHBggXw8/PDwIEDcefOHdy5cwd16tTBo0eP4O/vj2bNmuHkyZPYuXMnMjMz0a1bN5X5V69eDX19fRw+fBhLliwBAOjo6OC7777D2bNnsXr1auzduxejR48GAHz44YeYP38+zM3NlesbNWqUWl0KhQKdOnXCgwcPsH//fsTFxeHq1avo3r27Sr8rV65g69at2L59O7Zv3479+/djxowZAIA7d+6gR48e6NevH86fP499+/ahS5cu4K1eid4+PBVIRNWCXC6Hvr4+jI2NYWdnp2xftGgRmjVrhmnTpinbVq5ciTp16uDSpUto0KABAMDV1RWzZs1SWebL47WcnZ0xZcoUfPnll/j++++hr68PuVwOmUymsr5XxcfH4/Tp00hLS0OdOnUAAD/++CM8PDyQmJiIFi1aAPg7gMXExMDMzAwA0Lt3b8THx2Pq1Km4c+cOCgoK0KVLFzg5OQEAPD09K/BqEVFl4RErIqrW/vzzTyQkJMDU1FT5aNSoEYC/jxIV8fb2Vpt3z549CAgIgIODA8zMzNC7d2/cv38fT58+LfP6z58/jzp16ihDFQC4u7vDwsIC58+fV7Y5OzsrQxUA1KpVC3fv3gUAeHl5ISAgAJ6envjss8+wfPlyPHz4sOwvAhG9MQxWRFSt5eTkICQkBCkpKSqP1NRUtG7dWtnPxMREZb5r167h008/RZMmTbBp0yacOnUK0dHRAP4e3C61GjVqqDyXyWRQKBQAAF1dXcTFxWHHjh1wd3fHwoUL0bBhQ6SlpUleBxFVDIMVEVUb+vr6KCwsVGl7//33cfbsWTg7O6N+/foqj1fD1MtOnToFhUKBOXPm4IMPPkCDBg1w+/bt167vVW5ubrh58yZu3rypbDt37hwePXoEd3f3Mm+bTCZDy5YtMXnyZCQnJ0NfXx9btmwp8/xE9GYwWBFRteHs7Izjx4/j2rVryMrKgkKhQGRkJB48eIAePXogMTERV65cwa5duxA
REVFqKKpfvz7y8/OxcOFCXL16FWvWrFEOan95fTk5OYiPj0dWVlaxpwgDAwPh6emJXr16ISkpCSdOnECfPn3Qpk0bNG/evEzbdfz4cUybNg0nT57EjRs3sHnzZty7dw9ubm6avUBEVOkYrIio2hg1ahR0dXXh7u4OGxsb3LhxA/b29jh8+DAKCwvRvn17eHp6Yvjw4bCwsICOTskfgV5eXpg7dy5mzpyJxo0bY+3atZg+fbpKnw8//BBffvklunfvDhsbG7XB78DfR5q2bduG9957D61bt0ZgYCDq1auH2NjYMm+Xubk5Dhw4gE8++QQNGjTAf/7zH8yZMwfBwcFlf3GI6I2QCf5el4iIiEgSPGJFREREJBEGKyIiIiKJMFgRERERSYTBioiIiEgiDFZEREREEmGwIiIiIpIIgxURERGRRBisiIiIiCTCYEVEREQkEQYrIiIiIokwWBERERFJhMGKiIiISCL/D0PSkjFMikifAAAAAElFTkSuQmCC", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" }, { "name": "stdout", "output_type": "stream", "text": [ "Test MSE: nan\n", "Standard Error of MSE: nan\n" ] } ], "source": [ "# Set the learning rate and run the model\n", "alpha = 0.1\n", "run(alpha)" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.7" } }, "nbformat": 4, "nbformat_minor": 2 }